from fastapi import Request, Query, Depends, Header, APIRouter, HTTPException
from fastapi.responses import JSONResponse
import pytz
import os
import uuid
from datetime import datetime
from connectors.firebase.firebase import Firebase
from typing import List, Optional, Literal, Tuple

from models.automation_v2 import *

from api.feature.automationV2.src.models import Workflow
from api.feature.automationV2.src import nodes
from api.feature.automationV2.src.engine import NodeFactory


from api.feature.automationV2.src.integrations.database_connection import DatabaseConnection
from api.feature.automationV2.src.integrations.database_logger import DatabaseLogger

import logging
logging.basicConfig(level=logging.INFO)
timezone_utc = pytz.utc

fb = Firebase(host=os.environ.get("FIREBASE_HOST"))

router = APIRouter()

LOGGING_PREFIX = "api_feature_automation_v2"

# Pagination helper functions
def _apply_filters(items: List[dict], filters: List[str]) -> List[dict]:
    """Apply filters to items list"""
    if not filters or len(filters) == 0:
        return items
    
    search_filters = []
    for filter_str in filters:
        if ':' in filter_str:
            field, value = filter_str.split(':', 1)
            if field and value:
                search_filters.append({"field": field, "value": value})
    
    if not search_filters:
        return items
    
    # Group filters by value (for OR search across different fields that share the same value)
    value_groups = {}
    for sf in search_filters:
        value_groups.setdefault(sf["value"], []).append(sf["field"])
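    # e.g. [{"field": "name", "value": "foo"}, {"field": "status", "value": "foo"}]
    # groups to {"foo": ["name", "status"]}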
    
    # Apply filters
    filtered = []
    for item in items:
        matches_all_values = True
        for search_value, fields in value_groups.items():
            # Check if search_value matches in ANY of the fields (OR across fields)
            value_matches = False
            for field in fields:
                # Map "name" to "automation_name" for backward compatibility (same as sorting)
                search_field = field
                if field == 'name':
                    search_field = 'automation_name'
                
                field_value = item.get(search_field)
                if field_value is not None:
                    field_str = str(field_value).lower()
                    value_str = str(search_value).lower()
                    if value_str in field_str:  # Partial match (contains)
                        value_matches = True
                        break
            if not value_matches:
                matches_all_values = False
                break
        if matches_all_values:
            filtered.append(item)
    
    return filtered
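
# Illustrative example of the filter semantics above (hypothetical data):
#   items = [{"automation_name": "Welcome email", "status": "active"}]
#   _apply_filters(items, ["name:welcome"])  -> [items[0]]  ("name" maps to automation_name; case-insensitive substring)
#   _apply_filters(items, ["name:goodbye"])  -> []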

@router.post("", response_model=CreateUpdateWorkflowResponse, description="Create a new automation workflow")
async def add_feature_automation_v2(request: WorkflowModel, default_request: Request):
    now_request = datetime.utcnow().isoformat()
    try:
        uuidStr = str(uuid.uuid4())
        request.automation_id = uuidStr
        
        # Convert Pydantic model to dictionary for Workflow class
        workflow_dict = request.model_dump()
        
        db_conn = DatabaseConnection()
        database_logger = DatabaseLogger(db_connection=db_conn)
        
        wf = Workflow(uuidStr, workflow_dict, database_logger=database_logger)
        wf.install_automation_context(fb, now_request)

        # Convert node Pydantic model to dict for node factory
        start_node_model = request.nodes[wf.start_node_id]
        start_node = start_node_model.model_dump()
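        # Attach the node id (its key under request.nodes) so the factory can identify the node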
        start_node['node_id'] = wf.start_node_id
        node_factory = NodeFactory()
        node_factory.register_node_type('trigger', nodes.TriggerNode)
        trigger_node = node_factory.create_node(start_node, wf.property_id, database_logger=database_logger)

        # Get datebegin and dateend from the node (now included in Pydantic model)
        datebegin = start_node.get('datebegin')
        dateend = start_node.get('dateend')
        if not datebegin or not dateend:
            raise ValueError("Trigger node must have datebegin and dateend fields")
        
        schedule, _ = trigger_node.generate_schedule(datebegin, dateend, wf.settings['timezone'])
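        # Persist the schedule; presumably consumed later by the trigger manager (see /execute/trigger-manager)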
        database_logger.generate_trigger(wf.property_id, wf.workflow_id, wf.version, schedule)

        return JSONResponse(status_code=200, content={'status': 'ok', 'automation_id': uuidStr, 'message': f"Automation {uuidStr} set up successfully"})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.put("", response_model=CreateUpdateWorkflowResponse, description="Update an existing automation workflow")
async def update_feature_automation_v2(request: WorkflowModel, default_request: Request):
    now_request = datetime.utcnow().isoformat()
    try:
        # Convert Pydantic model to dictionary for Workflow class
        workflow_dict = request.model_dump()
        
        db_conn = DatabaseConnection()
        database_logger = DatabaseLogger(db_connection=db_conn)
        wf = Workflow(request.automation_id, workflow_dict, database_logger=database_logger)

        wf.update_automation_context(fb, now_request)

        return JSONResponse(status_code=200, content={'status': 'ok', 'automation_id': request.automation_id, 'message': f"Automation {request.automation_id} updated successfully"})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.get("", response_model=DataResponse, description="Get automation workflow details")
async def get_feature_automation_v2(
    default_request: Request,
    automation_id: str = Query('all', description="Automation ID"),
    property_id: str = Query(..., description="Property ID"),
    display_type: str = Query("list", description="Display Type: list or calendar"),
    date_start: str = Query(None, description="Start Date for calendar view in format YYYY-MM-DD"),
    date_end: str = Query(None, description="End Date for calendar view in format YYYY-MM-DD"),
    status: List[str] = Query(["all"], description="Status filter for calendar view"),
    page: Optional[int] = Query(1, description="Page number for pagination"),
    page_size: Optional[int] = Query(50, description="Number of items per page"),
    start: Optional[int] = Query(None, description="Start index for pagination (0-based)"),
    length: Optional[int] = Query(None, description="Number of items per page (alternative to page_size)"),
    filter: Optional[List[str]] = Query(None, description="Filter parameters in format field:value"),
    sort_by: Optional[Literal['name', 'automation_name', 'createdate', 'lastupdate']] = Query('createdate', description="Sort by field"),
    sort_order: Optional[Literal['asc', 'desc']] = Query('desc', description="Sort order")
    ):
    try:
        if display_type == 'list':
            if automation_id != 'all':
                automation_context = fb.db.reference().child(f"account/{property_id}/automation_v2/{automation_id}").get()
                if not automation_context:
                    return JSONResponse(status_code=400, content={'status': 'not found', 'message': f'Automation {automation_id} not found', 'data': []})
                ordered_automation_desc = [automation_context]
            else:
                automation_context = fb.db.reference().child(f"account/{property_id}/automation_v2").get()
                if automation_context:
                    returnAutomation = []
                    for a in automation_context:
                        au = automation_context[a]
                        if au.get('display'):
                            au.update({'automation_id': a})
                            returnAutomation.append(au)
                    
                    ordered_automation_desc = sorted(returnAutomation, key=lambda x: x['createdate'], reverse=True)
                    
                else:
                    return JSONResponse(status_code=400, content={'status': 'not found', 'message': f'Property {property_id} has no automation flow', 'data': []})
            
            # Time zone normalization for display
            for auto in ordered_automation_desc:
                for key in ["createdate", "lastupdate"]:
                    if key in auto and auto[key]:
                        try:
                            # Parse ISO datetime string (assuming UTC)
                            dt = datetime.fromisoformat(auto[key])
                            if dt.tzinfo is None:
                                dt = dt.replace(tzinfo=pytz.UTC)
                            
                            # Normalize to timezone-aware UTC
                            dt_utc = dt.astimezone(timezone_utc)
                            # Store back as ISO string for readability
                            auto[key] = dt_utc.isoformat()
                        except Exception as e:
                            logging.error(f"Error converting {key}: {e}")

            # Pagination, filtering, and sorting (only when listing all automations)
            if automation_id == 'all':
                # Apply filters
                if filter:
                    ordered_automation_desc = _apply_filters(ordered_automation_desc, filter)
                
                # Sorting: map "name" to "automation_name" for backward compatibility
                sort_field = 'automation_name' if sort_by == 'name' else sort_by
                ordered_automation_desc = sorted(ordered_automation_desc, key=lambda x: x.get(sort_field, ''), reverse=(sort_order == 'desc'))
                
                # Pagination: support both start/length and page/page_size
                if start is not None and length is not None:
                    start_index = max(0, start)  # Ensure non-negative
                    page_size_used = max(1, length)  # Ensure at least 1
                else:
                    start_index = max(0, (page - 1) * page_size)
                    page_size_used = max(1, page_size)
                
                total = len(ordered_automation_desc)
                end_index = min(start_index + page_size_used, total)  # Ensure we don't exceed array bounds
                paginated_automation = ordered_automation_desc[start_index:end_index]
                
                pagination = {
                    "start": start_index,
                    "length": page_size_used,
                    "total": total,
                    "hasMore": end_index < total
                }
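                # Example shape, assuming 120 matching items with start=0, length=50:
                #   {"start": 0, "length": 50, "total": 120, "hasMore": true}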
                
                return JSONResponse(status_code=200, content={
                    'status': 'ok', 
                    'data': {
                        'items': paginated_automation,
                        'pagination': pagination
                    },
                    'message':'success'
                })
            else:
                return JSONResponse(status_code=200, content={'status': 'ok', 'data': ordered_automation_desc, 'message':'success'})
        
        elif display_type == 'calendar':
            if not date_start or not date_end:
                return JSONResponse(status_code=400, content={'status': 'not found', 'message': 'Display type calendar must provide date_start and date_end', 'data': []})
            
            archiveList = fb.db.reference().child(f"account/{property_id}/automation_archive").get()
            archiveList = archiveList or []
            
            from connectors.cloudSQL.cloudsql import CloudSQL
            from utility.dashboard.genQuery import GenerateQuery
            import pandas as pd
            genq = GenerateQuery(property_id=property_id)
            csql = CloudSQL("asia-southeast1", os.environ.get("INSTANCE_NAME"))
            csql.create_engine(os.environ.get("POSTGRES_USER"), os.environ.get("POSTGRES_PASSWORD"), os.environ.get("POSTGRES_DB"))
            query = genq.getAutomationCalendarV2(date_start, date_end, status)
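            # Run the generated SQL; the result's column order must match the DataFrame columns below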
            result = csql.query(query)
            df_calendar = pd.DataFrame(result, columns=['datetime_trigger', 'automation_id', 'automation_name', 'status', 'active_status'])
            if df_calendar.empty:
                return JSONResponse(status_code=200, content={'status': 'ok', 'message': 'No data','data': []})
            
            # Convert trigger timestamps from UTC to Asia/Bangkok (GMT+7) for display
            df_calendar['datetime_trigger'] = (
                pd.to_datetime(df_calendar['datetime_trigger'], utc=True)
                .dt.tz_convert('Asia/Bangkok')
                .dt.strftime('%Y-%m-%d %H:%M:%S')
            )
            if archiveList:
                df_calendar = df_calendar[~df_calendar['automation_id'].isin(archiveList)]
            df_calendar.sort_values(by='datetime_trigger', inplace=True)
            result_json = df_calendar.to_dict(orient='records')
            return JSONResponse(status_code=200, content={'status': 'ok', 'data': result_json, 'message': 'success'})

        else:
            # Reject unsupported display_type values instead of falling through and returning None
            return JSONResponse(status_code=400, content={'status': 'error', 'message': f"Unsupported display_type '{display_type}'; expected 'list' or 'calendar'"})

    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})
    
@router.post("/status", description="Update automation workflow status active/inactive")
async def update_feature_automation_v2_status(request: UpdateAutomationStatusModel, default_request: Request):
    try:
        automationExist = fb.db.reference().child(f"account/{request.property_id}/automation_v2/{request.automation_id}").get(shallow=True)
        if not automationExist:
            return JSONResponse(status_code=400, content={'status': 'not found', 'message': f"Automation ID: {request.automation_id} not found"})
        fb.db.reference().child(f"account/{request.property_id}/automation_v2/{request.automation_id}/active").set(request.status)
        #Update workflow and trigger manager
        db_conn = DatabaseConnection()
        database_logger = DatabaseLogger(db_connection=db_conn)
        database_logger.update_work_flow_status(request.property_id, request.automation_id, request.status)
        database_logger.update_trigger_mannager(request.property_id, request.automation_id, request.status)

        return JSONResponse(status_code=200, content={'status': 'ok', 'message': f"Automation {request.automation_id} status updated successfully"})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.get("/execute/{property_id}/{automation_id}/{execution_id}", response_model=DataResponse, description="Get automation execution details")
async def feature_automation_v2_get_execute(default_request: Request, automation_id: str, property_id: str, execution_id: str = 'all'):
    try:
        db_conn = DatabaseConnection()
        database_logger = DatabaseLogger(db_connection=db_conn)
        result = database_logger.get_automation_execution(property_id, automation_id, execution_id)
        if result:
            return JSONResponse(status_code=200, content={'status': 'ok', 'data': result, 'message':'success'})
        else:
            return JSONResponse(status_code=200, content={'status': 'ok', 'data': [], 'message':'no data'})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.get("/execute/{property_id}/{automation_id}/{execution_id}/data", response_model=DataResponse, description="Get automation execution data details")
async def feature_automation_v2_get_execute_data(default_request: Request, automation_id: str, property_id: str, execution_id: str):
    try:
        db_conn = DatabaseConnection()
        database_logger = DatabaseLogger(db_connection=db_conn)
        result = database_logger.get_execution_data(property_id, automation_id, execution_id)
        if result:
            return JSONResponse(status_code=200, content={'status': 'ok', 'data': result, 'message':'success'})
        else:
            return JSONResponse(status_code=200, content={'status': 'ok', 'data': [], 'message':'no data'})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.post("/execute/trigger-manager", description="Trigger all workflows in the schedule to run")
async def feature_automation_v2_trigger_manager(default_request: Request):
    try:
        from api.feature.automationV2.trigger_manager import execute_workflow
        execute_workflow()
        return JSONResponse(status_code=200, content={'status': 'ok', 'message': 'All workflows in this schedule ran successfully'})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})
    

@router.post("/execute/trigger-waiting", description="Trigger all waiting nodes to run")
async def feature_automation_v2_trigger_waiting(default_request: Request):
    try:
        from api.feature.automationV2.trigger_waiting import execute_workflow_waiting
        execute_workflow_waiting()
        return JSONResponse(status_code=200, content={'status': 'ok', 'message': 'All waiting nodes in this schedule ran successfully'})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})

@router.post("/execute/manual", response_model=DataResponse, description="Execute an automation manually")
async def feature_automation_v2_execute_manual(request: AutomationBaseModel, default_request: Request):
    try:
        from api.feature.automationV2.trigger_manual import execute_workflow_manual
        data = execute_workflow_manual(request.property_id, request.automation_id)
        return JSONResponse(status_code=200, content={'status': 'ok', 'data': data, 'message': f'Automation {request.automation_id} ran successfully'})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        return JSONResponse(status_code=500, content={'status': 'error', 'message': str(e)})