from fastapi import Request, Query, Depends, Header, APIRouter, HTTPException, UploadFile, File, Form
from fastapi.responses import JSONResponse
from utility.function import Function
from connectors.firebase.firebase import Firebase
from utility.authorization import TokenAuthorization
from connectors.cloudStorage.bucketstorage import Storage
import pytz
import os
import uuid
from datetime import datetime, timedelta
from werkzeug.utils import secure_filename
from models.keyword import *
from typing import Optional, List, Literal, Tuple

from utility.keyword import Keywords, BigQueryKeyword

import logging
logging.basicConfig(level=logging.INFO)
timezone_utc = pytz.utc
timezone_bkk = pytz.timezone("Asia/Bangkok")

fb = Firebase(host=os.environ.get("FIREBASE_HOST"))

router = APIRouter()

LOGGING_PREFIX = "api_feature_keyword"

# Pagination helper functions
def _apply_filters(items: List[dict], filters: List[str]) -> List[dict]:
    """Apply filters to items list"""
    if not filters or len(filters) == 0:
        return items
    
    search_filters = []
    for filter_str in filters:
        if ':' in filter_str:
            field, value = filter_str.split(':', 1)
            if field and value is not None:
                search_filters.append({"field": field, "value": value})
    
    if not search_filters:
        return items
    
    # Group filters by value (for OR search across different fields with same value)
    value_groups = {}
    for sf in search_filters:
        value = sf["value"]
        if value not in value_groups:
            value_groups[value] = []
        value_groups[value].append(sf["field"])
    
    # Apply filters
    filtered = []
    for item in items:
        matches_all_values = True
        for search_value, fields in value_groups.items():
            # Check if searchValue matches in ANY of the fields (OR across fields)
            value_matches = False
            for field in fields:
                field_value = item.get(field)
                if field_value is not None:
                    field_str = str(field_value).lower()
                    value_str = str(search_value).lower()
                    if value_str in field_str:  # Partial match (contains)
                        value_matches = True
                        break
            if not value_matches:
                matches_all_values = False
                break
        if matches_all_values:
            filtered.append(item)
    
    return filtered

def _apply_sort(items: List[dict], sort_by: Optional[str], order: Optional[str]) -> List[dict]:
    """Apply sorting to items list"""
    if not sort_by:
        return items
    
    order_desc = order == "desc"
    # Use sorted() to avoid modifying original list
    return sorted(items, key=lambda x: (x.get(sort_by) is None, x.get(sort_by)), reverse=order_desc)

def _apply_pagination(items: List[dict], offset: int, limit: int) -> Tuple[List[dict], dict]:
    """Apply pagination to items list"""
    total = len(items)
    paginated_items = items[offset:offset + limit] if limit > 0 else items[offset:]
    
    pagination = {
        "offset": offset,
        "limit": limit if limit > 0 else total,
        "total": total,
        "hasMore": offset + limit < total if limit > 0 else False
    }
    
    return paginated_items, pagination

@router.get("/available/channel", response_model=DataResponse, description="Get available channels for keyword feature")
async def feature_keyword_get_avilable_channel(
    default_request: Request = None,
    property_id: str = Query(..., description="Property ID"),
):
    """Return the named Facebook pages and LINE channels of a property.

    Reads `property/{property_id}/channel/{facebook,line}` from Firebase
    and maps them onto the channel types the keyword feature supports.
    The three facebook_* channel types share the same page list.
    Raises HTTP 500 on any Firebase error.
    """
    try:
        def _named_channels(node) -> List[dict]:
            # Keep only entries that carry a "name"; tolerate null entries.
            result = []
            for channel_id, info in (node or {}).items():
                name = (info or {}).get("name")
                if name:
                    result.append({"name": name, "id": channel_id})
            return result

        fb_result = _named_channels(fb.db.reference(f'property/{property_id}/channel/facebook').get())
        line_result = _named_channels(fb.db.reference(f'property/{property_id}/channel/line').get())

        channel_dict = {
            "facebook_page": fb_result,
            "facebook_post": fb_result,
            "facebook_comment": fb_result,
            "line_chat": line_result,
        }
        # Fix: the original content literal spelled 'message' twice; one key suffices.
        return JSONResponse(status_code=200, content={"status": "ok", 'message': 'Success', 'data': channel_dict})

    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        raise HTTPException(500, detail=str(e))

@router.get("/template_list", response_model=DataResponse, description="Get keyword template list")
async def feature_keyword_get_template_list(default_request: Request):
    """Return the keyword template catalog summarized per category.

    Reads `template/keyword/` from Firebase and reduces it with
    `Keywords.summarize_minimal`. Raises HTTP 500 on any error.
    """
    try:
        templates = fb.db.reference('template/keyword/').get()
        category_map = Keywords.summarize_minimal(templates)
        # Fix: the original content literal spelled 'message' twice; one key suffices.
        return JSONResponse(status_code=200, content={"status": "ok", 'message': 'Success', 'data': category_map})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        raise HTTPException(500, detail=str(e))

@router.get("/template_detail", response_model=DataResponse, description="Get keyword template detail")
async def feature_keyword_get_template_detail(default_request: Request, template_name: str = Query('all', description="Template Name")):
    """Return one keyword template, or the whole tree for 'all'.

    404 with a branch-specific message when nothing is stored; HTTP 500
    on any Firebase error.
    """
    try:
        if template_name == 'all':
            payload = fb.db.reference('template/keyword/').get()
            if not payload:
                return JSONResponse(status_code=404, content={"status": "error", 'message': 'No templates found'})
            return JSONResponse(status_code=200, content={"status": "ok", 'message': 'Success', 'data': payload})

        payload = fb.db.reference(f'template/keyword/{template_name}').get()
        if not payload:
            return JSONResponse(status_code=404, content={"status": "error", 'message': 'Template not found'})
        return JSONResponse(status_code=200, content={"status": "ok", 'message': 'Success', 'data': payload})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        raise HTTPException(500, detail=str(e))

@router.post("/check_existing_keyword")
async def feature_keyword_check_existing_keyword(default_request: Request, request: CategoryTemplate):
    """Report whether a keyword category already exists for a property.

    Always returns 200 with `data.exists`; the message mirrors the flag.
    Raises HTTP 500 on any Firebase error.
    """
    try:
        check_exist = fb.db.reference(f'account/{request.property_id}/keyword/{request.category_name}').get()
        exists = check_exist is not None
        # Fix: the original always reported "does not exist", even when the
        # category was found; make the message agree with `exists`.
        if exists:
            message = f'Keyword Category: {request.category_name} already exists for this property'
        else:
            message = f'Keyword Category: {request.category_name} does not exist for this property'
        return JSONResponse(status_code=200, content={"status": "ok", 'message': message, 'data': {'exists': exists}})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        raise HTTPException(500, detail=str(e))
    
@router.get("/list_all_keywords", response_model=DataResponse, description="Get all keywords for a property")
async def feature_keyword_get_keywords(
    request: Request,
    property_id: str = Query(..., description="Property ID"),
    offset: Optional[int] = Query(0, ge=0, description="Number of records to skip"),
    limit: Optional[int] = Query(20, ge=1, le=100, description="Number of records to return"),
    filter: Optional[List[str]] = Query(None, description="Filter criteria in format field:value"),
    sort_by: Optional[str] = Query(None, description="Field to sort by"),
    order: Optional[Literal["asc", "desc"]] = Query("asc", description="Sort order")
):
    """List a property's keyword categories as a paginated summary.

    Reads `account/{property_id}/keyword/`, falling back to the legacy
    root path `keyword/` when that node is empty. Each category is
    reduced to {name, createddate, status}, then run through
    `_apply_filters`, `_apply_sort` (defaulting to createddate desc) and
    `_apply_pagination`. Filter/sort/pagination failures are logged and
    degraded gracefully rather than failing the request, and every
    response — including errors — carries an `items` list plus a
    `pagination` dict so clients can parse uniformly.
    """
    # Initialize default values for error handling
    error_offset = 0
    error_limit = 20
    try:
        # offset/limit carry Query defaults, so these None-guards only
        # protect the error-response metadata below.
        error_offset = offset if offset is not None else 0
        error_limit = limit if limit is not None else 20
        
        logging.info(f"{LOGGING_PREFIX}_list_all_keywords: Starting - property_id={property_id}, offset={offset}, limit={limit}")
        # Validate property_id
        if not property_id or property_id.strip() == "":
            logging.error(f"{LOGGING_PREFIX}_list_all_keywords: property_id is empty or missing")
            return JSONResponse(status_code=400, content={
                "status": "error",
                "message": "Property ID is required",
                "data": {
                    "items": [],
                    "pagination": {
                        "offset": offset,
                        "limit": limit,
                        "total": 0,
                        "hasMore": False
                    }
                }
            })
        
        # Debug logging
        logging.info(f"{LOGGING_PREFIX}_list_all_keywords: property_id={property_id}")
        
        # Try primary path: account/{property_id}/keyword/
        firebase_path = f'account/{property_id}/keyword/'
        logging.info(f"{LOGGING_PREFIX}_list_all_keywords: Trying Firebase path={firebase_path}")
        
        existing_keyword_categories = fb.db.reference(firebase_path).get() or {}
        
        # Debug logging
        logging.info(f"{LOGGING_PREFIX}_list_all_keywords: Found {len(existing_keyword_categories)} keyword categories at {firebase_path}")
        
        # If no data found, try alternative path: keyword/ (for backward compatibility)
        # NOTE(review): the legacy `keyword/` root is not scoped to the
        # property — confirm it only ever holds this property's data.
        if not existing_keyword_categories:
            alternative_path = 'keyword/'
            logging.warning(f"{LOGGING_PREFIX}_list_all_keywords: No data at {firebase_path}, trying alternative path: {alternative_path}")
            existing_keyword_categories = fb.db.reference(alternative_path).get() or {}
            if existing_keyword_categories:
                logging.info(f"{LOGGING_PREFIX}_list_all_keywords: Found {len(existing_keyword_categories)} keyword categories at {alternative_path}")
                firebase_path = alternative_path
        
        if existing_keyword_categories:
            logging.info(f"{LOGGING_PREFIX}_list_all_keywords: Keys: {list(existing_keyword_categories.keys())}")
        else:
            logging.warning(f"{LOGGING_PREFIX}_list_all_keywords: No keywords found at any path")
        
        # Empty result is a successful 200, not an error.
        if not existing_keyword_categories:
            return JSONResponse(status_code=200, content={
                "status": "ok",
                "message": "No keywords found for this property",
                "data": {
                    "items": [],
                    "pagination": {
                        "offset": offset,
                        "limit": limit,
                        "total": 0,
                        "hasMore": False
                    }
                }
            })

        # Reduce each category node to the summary fields the UI lists.
        keywords_summary = []
        for key, data in existing_keyword_categories.items():
            # Skip malformed (non-dict) children defensively.
            if not isinstance(data, dict):
                continue

            name = data.get("name", key)
            # Tolerate both spellings; the create endpoint writes "createdate".
            createddate = data.get("createddate") or data.get("createdate")
            status = data.get("status", "inactive")

            keywords_summary.append({
                "name": name, 
                "createddate": createddate,
                "status": status,
            })
        
        # Apply filters
        try:
            if filter:
                logging.info(f"{LOGGING_PREFIX}_list_all_keywords: Applying filters: {filter}")
                keywords_summary = _apply_filters(keywords_summary, filter)
                logging.info(f"{LOGGING_PREFIX}_list_all_keywords: After filtering: {len(keywords_summary)} items")
        except Exception as filter_error:
            logging.error(f"{LOGGING_PREFIX}_list_all_keywords: Filter error: {filter_error}")
            # Continue without filtering if filter fails
        
        # Apply sorting (default to createddate desc if no sort_by specified)
        try:
            if sort_by:
                logging.info(f"{LOGGING_PREFIX}_list_all_keywords: Applying sort: {sort_by} {order}")
                keywords_summary = _apply_sort(keywords_summary, sort_by, order)
            else:
                # Default sort by createddate desc
                keywords_summary = sorted(keywords_summary, key=lambda x: x.get('createddate', ''), reverse=True)
            logging.info(f"{LOGGING_PREFIX}_list_all_keywords: After sorting: {len(keywords_summary)} items")
        except Exception as sort_error:
            logging.error(f"{LOGGING_PREFIX}_list_all_keywords: Sort error: {sort_error}")
            # Continue without sorting if sort fails
        
        # Apply pagination
        try:
            items, pagination = _apply_pagination(keywords_summary, offset, limit)
            logging.info(f"{LOGGING_PREFIX}_list_all_keywords: After pagination: {len(items)} items, total: {pagination['total']}")
        except Exception as pagination_error:
            logging.error(f"{LOGGING_PREFIX}_list_all_keywords: Pagination error: {pagination_error}")
            # Return all items if pagination fails
            items = keywords_summary
            pagination = {
                "offset": offset,
                "limit": limit,
                "total": len(keywords_summary),
                "hasMore": False
            }
        
        # Return pagination format
        return JSONResponse(status_code=200, content={
            "status": "ok",
            "message": "Success",
            "data": {
                "items": items,
                "pagination": pagination
            }
        })
    except Exception as e:
        # Unexpected failure: log the full traceback and still return the
        # uniform items/pagination envelope (500 as JSON, not HTTPException).
        import traceback
        error_traceback = traceback.format_exc()
        logging.error(f"Error parsing {LOGGING_PREFIX}_list_all_keywords: {e}")
        logging.error(f"Error details: {str(e)}")
        logging.error(f"Traceback: {error_traceback}")
        return JSONResponse(status_code=500, content={
            "status": "error",
            "message": f"Internal server error: {str(e)}",
            "data": {
                "items": [],
                "pagination": {
                    "offset": error_offset,
                    "limit": error_limit,
                    "total": 0,
                    "hasMore": False
                }
            }
        })

@router.get("/keyword_category", response_model=DataResponse, description="Get keyword category details")
async def feature_keyword_get_keyword_category(default_request: Request, property_id: str = Query(..., description="Property ID"), category_name: str = Query(..., description="Category Name")):
    """Fetch one keyword category's full definition for a property.

    404 when the category node is missing; HTTP 500 on Firebase errors.
    """
    try:
        category = fb.db.reference(f'account/{property_id}/keyword/{category_name}').get()
        if category:
            return JSONResponse(status_code=200, content={"status": "ok", 'message': 'Success', 'data': category})
        return JSONResponse(status_code=404, content={"status": "error", 'message': 'Keyword category not found', 'data': {}})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        raise HTTPException(500, detail=str(e))

@router.post("/keyword_category", description="Get keyword category details", response_model=DataResponse)
async def feature_keyword_create_update_keyword_category(default_request: Request, request: CreateCategoryTemplate):
    """Create a keyword category; 400 when one with the same name exists.

    Writes the submitted structure first, then stamps the metadata
    fields (name, apply_to, status, level, createdate, lastupdate) as
    individual child writes on the same node.
    """
    try:
        base_path = f'account/{request.property_id}/keyword/{request.category_name}'
        if fb.db.reference(base_path).get():
            return JSONResponse(status_code=400, content={"status": "error", "message": f"Keyword Category: {request.category_name} already exists for this property"})

        now_utc = datetime.now(timezone_utc).strftime("%Y-%m-%d %H:%M:%S")
        fb.db.reference(base_path).set(request.structure)
        metadata = {
            'name': request.category_name,
            'apply_to': request.apply_to,
            'status': request.status,
            'level': "category_1",
            'createdate': now_utc,
            'lastupdate': now_utc,
        }
        for field, value in metadata.items():
            fb.db.reference(f'{base_path}/{field}').set(value)

        return JSONResponse(status_code=200, content={"status": "ok", 'message': f'Keyword Category: {request.category_name} created successfully', 'data': {}})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        raise HTTPException(500, detail=str(e))

@router.put("/keyword_category", description="Update keyword category details", response_model=DataResponse)
async def feature_keyword_update_keyword_category(default_request: Request, request: CreateCategoryTemplate):
    """Replace an existing keyword category's structure and metadata.

    404 when the category does not exist. The node is overwritten with
    the submitted structure, then the metadata children are re-stamped.
    """
    try:
        base_path = f'account/{request.property_id}/keyword/{request.category_name}'
        check_exist = fb.db.reference(base_path).get()
        if not check_exist:
            return JSONResponse(status_code=404, content={"status": "error", "message": f"Keyword Category: {request.category_name} does not exist for this property"})

        # Fix: `.set(structure)` replaces the whole node, which used to wipe
        # the original `createdate` without restoring it — remember it first.
        original_createdate = check_exist.get('createdate') if isinstance(check_exist, dict) else None

        fb.db.reference(base_path).set(request.structure)
        fb.db.reference(f'{base_path}/name').set(request.category_name)
        fb.db.reference(f'{base_path}/apply_to').set(request.apply_to)
        fb.db.reference(f'{base_path}/status').set(request.status)
        fb.db.reference(f'{base_path}/level').set("category_1")
        if original_createdate is not None:
            fb.db.reference(f'{base_path}/createdate').set(original_createdate)
        fb.db.reference(f'{base_path}/lastupdate').set(datetime.now(timezone_utc).strftime("%Y-%m-%d %H:%M:%S"))
        return JSONResponse(status_code=200, content={"status": "ok", 'message': f'Keyword Category: {request.category_name} updated successfully', 'data': {}})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        raise HTTPException(500, detail=str(e))

@router.delete("/keyword_category", description="Delete keyword category", response_model=DataResponse)
async def feature_keyword_delete_keyword_category(default_request: Request, request: KeywordName):
    """Delete a keyword category node for a property; 404 when absent."""
    try:
        keyword_ref = fb.db.reference(f'account/{request.property_id}/keyword/{request.keyword_name}')
        if not keyword_ref.get():
            return JSONResponse(status_code=404, content={"status": "error", "message": "Keyword doesn't exist for this property"})
        keyword_ref.delete()
        return JSONResponse(status_code=200, content={"status": "ok", 'message': f'Delete keyword category: {request.keyword_name} successfully!'})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        raise HTTPException(500, detail=str(e))

@router.post("/update/status", description="Update keyword category status", response_model=DataResponse)
async def feature_keyword_update_keyword_status(default_request: Request, request: UpdateCategoryTemplate):
    """Set a keyword category's `status` child (no existence check)."""
    try:
        status_path = f'account/{request.property_id}/keyword/{request.category_name}/status'
        fb.db.reference(status_path).set(request.status)
        return JSONResponse(status_code=200, content={"status": "ok", 'message': f'Keyword Category: {request.category_name} status updated to {request.status} successfully', 'data': {}})
    except Exception as exc:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {exc}")
        raise HTTPException(500, detail=str(exc))
    
@router.post("/message_labeling", description="Label messages with keywords", response_model=DataResponse)
async def feature_keyword_message_labeling(default_request: Request):
    """Batch-label the last two hours of messages for every property.

    For each property that has a keyword tree, pulls Facebook comments,
    Facebook chat and LINE chat rows from BigQuery for the window, labels
    each message via `Keywords.label_to_bq`, collapses labels to a single
    group with `Keywords.choose_single_group_labels`, and upserts into
    `client_{property_id}.message_labeling` through a temp table
    (delete-matching-rows, then append). Raises HTTP 500 on any failure.
    """
    # Local imports: BigQuery and pandas are only needed by this batch job.
    from connectors.bigquery.bq import BigQuery, Table
    import pandas as pd
    try:
        # Window = the two full hours ending at the top of the current
        # hour, computed on the Bangkok wall clock.
        now = datetime.now(timezone_bkk)
        
        end_local   = now.replace(minute=0, second=0, microsecond=0)
        start_local = end_local - timedelta(hours=2)

        # if your BigQuery field is TIMESTAMP, convert to UTC strings
        # NOTE(review): despite the comment above, astimezone(timezone_bkk)
        # on an already-Bangkok value is a no-op, so the strings stay in
        # BKK time — confirm which timezone the BigQuery column expects.
        fmt = "%Y-%m-%d %H:%M:%S"
        datetime_start = start_local.astimezone(timezone_bkk).strftime(fmt)  # 'YYYY-MM-DD HH:MM:SS'
        datetime_end   = end_local.astimezone(timezone_bkk).strftime(fmt)
        
        bq = BigQuery()
        # NOTE(review): `.get().keys()` raises AttributeError when the node
        # is missing (get() -> None); the outer except turns that into 500.
        property_list = list(fb.db.reference(f'property').get().keys())
        for property_id in property_list:
            
            # Make sure the destination + staging tables exist before loading.
            message_schema = Table.tableMessageLabeling()
            bq.ensure_table(f'customer-360-profile.client_{property_id}.message_labeling',message_schema)
            bq.ensure_table(f'customer-360-profile.client_{property_id}.message_labeling_temp',message_schema)

            kw_tree = fb.db.reference(f'account/{property_id}/keyword').get() or {}
            if kw_tree:
                # Flatten the keyword tree, dropping metadata-only fields.
                flat_list = Keywords.flatten_keyword_tree_to_list(
                    kw_tree,
                    excluded_fields=("apply_to","createdate","lastupdate","name","status")  # <- your requested exclusions
                )
                
                ## facebook comment
                fb_pageId_list = list(fb.db.reference(f'property/{property_id}/channel/facebook').get().keys())
                apply_to_channel = "facebook_comment"
                if fb_pageId_list:
                    for fb_pageId in fb_pageId_list:
                        # flat_list holds single-entry dicts; collapse into one
                        # schema dict, then keep only keywords applied to this
                        # page/channel combination.
                        schema_dict = {list(d.keys())[0]: list(d.values())[0] for d in flat_list}
                        apply_to_map = Keywords.build_apply_to_map_if_contains_id(fb, property_id, required_id=fb_pageId, channel_key=apply_to_channel)
                        schema_dict = Keywords.filter_schema_by_apply_to(schema_dict, apply_to_map)

                        facebook_comment_query = BigQueryKeyword.facebook_comment_query(property_id,fb_pageId,datetime_start,datetime_end)
                        facebook_comment_df = bq.get_query_df(facebook_comment_query)
                        facebook_comment_df["labeling"] = facebook_comment_df["comment_text"].apply(lambda s: Keywords.label_to_bq(s, schema_dict))
                        facebook_comment_df = facebook_comment_df.drop(columns=['comment_type'])
                        # Normalize column names to the message_labeling schema.
                        facebook_comment_df = facebook_comment_df.rename(columns={
                            "facebook_name" : "pageName",
                            "ps_id" : "user_id",
                            "comment_text" : "message"
                        })
                        kw_list = list(fb.db.reference(f'account/{property_id}/keyword').get().keys())
                        priority_map = Keywords.load_group_priorities_deep(fb, property_id, groups=kw_list)
                        facebook_comment_df["single_grouping"] = facebook_comment_df["labeling"].apply(
                            lambda row: Keywords.choose_single_group_labels(
                                row,
                                priority_map=priority_map,
                                ascending_priority=True,          # change to False if higher numbers = higher priority
                                limit_to_groups=None,          # or ["purpose"] to emit only purpose
                                drop_excluded=True
                            )
                        )
                        fb_channel_name = fb.db.reference(f'property/{property_id}/channel/facebook/{fb_pageId}/name').get()
                        facebook_comment_df['pageName'] = fb_channel_name
                        # Upsert: stage into _temp, delete matching rows from the
                        # main table, then append the fresh batch.
                        bq.delete_data('customer-360-profile',f'client_{property_id}',"message_labeling_temp")
                        bq.load_data_df(f'client_{property_id}',"message_labeling_temp",facebook_comment_df)
                        bq.delete_when_match('customer-360-profile',f'client_{property_id}',"message_labeling",
                                            f'client_{property_id}','message_labeling_temp', 
                                            "ON ori.eventTimeStamp = temp.eventTimeStamp AND ori.user_id = temp.user_id AND ori.eventId = temp.eventId AND ori.eventName = temp.eventName ")
                        bq.load_data_df(f'client_{property_id}',"message_labeling",facebook_comment_df)
                        
                
                ## facebook chat
                facebook_channel_id_list = list(fb.db.reference(f'property/{property_id}/channel/facebook').get().keys())
                apply_to_channel = "facebook_message"
                if facebook_channel_id_list:
                    for facebook_channel_id in facebook_channel_id_list:
                        schema_dict = {list(d.keys())[0]: list(d.values())[0] for d in flat_list}
                        apply_to_map = Keywords.build_apply_to_map_if_contains_id(fb, property_id, required_id=facebook_channel_id, channel_key=apply_to_channel)
                        schema_dict = Keywords.filter_schema_by_apply_to(schema_dict, apply_to_map)
                        
                        facebook_message_query = BigQueryKeyword.facebook_message_query(property_id,facebook_channel_id,datetime_start,datetime_end)
                        fb_chat_df = bq.get_query_df(facebook_message_query)
                        fb_chat_df["labeling"] = fb_chat_df["message"].apply(lambda s: Keywords.label_to_bq(s, schema_dict))
                        fb_channel_name = fb.db.reference(f'property/{property_id}/channel/facebook/{facebook_channel_id}/name').get()
                        fb_chat_df['pageName'] = fb_channel_name
                        # NOTE(review): renaming "line_uid" on a *Facebook* chat
                        # frame mirrors the LINE branch below — confirm the
                        # facebook_message_query really exposes a "line_uid" column.
                        fb_chat_df = fb_chat_df.rename(columns={
                            "line_uid" : "user_id"
                        })
                        kw_list = list(fb.db.reference(f'account/{property_id}/keyword').get().keys())
                        priority_map = Keywords.load_group_priorities_deep(fb, property_id, groups=kw_list)
                        fb_chat_df["single_grouping"] = fb_chat_df["labeling"].apply(
                            lambda row: Keywords.choose_single_group_labels(
                                row,
                                priority_map=priority_map,
                                ascending_priority=True,          # change to False if higher numbers = higher priority
                                limit_to_groups=None,          # or ["purpose"] to emit only purpose
                                drop_excluded=True
                            )
                        )
                        bq.delete_data('customer-360-profile',f'client_{property_id}',"message_labeling_temp")
                        bq.load_data_df(f'client_{property_id}',"message_labeling_temp",fb_chat_df)
                        bq.delete_when_match('customer-360-profile',f'client_{property_id}',"message_labeling",
                                            f'client_{property_id}','message_labeling_temp', 
                                            "ON ori.eventTimeStamp = temp.eventTimeStamp AND ori.user_id = temp.user_id AND ori.eventId = temp.eventId AND ori.eventName = temp.eventName ")
                        bq.load_data_df(f'client_{property_id}',"message_labeling",fb_chat_df)
                                    
                ## facebook post
                ## --- future task when there will be post data in BigQuery ----
                
                ## line chat
                line_id_list = list(fb.db.reference(f'property/{property_id}/channel/line').get().keys())
                apply_to_channel = "line_chat"
                if line_id_list:
                    for line_id in line_id_list:
                        schema_dict = {list(d.keys())[0]: list(d.values())[0] for d in flat_list}
                        apply_to_map = Keywords.build_apply_to_map_if_contains_id(fb, property_id, required_id=line_id, channel_key=apply_to_channel)
                        schema_dict = Keywords.filter_schema_by_apply_to(schema_dict, apply_to_map)

                        line_chat_query = BigQueryKeyword.line_chat_query(property_id,line_id,datetime_start,datetime_end)
                        line_chat_df = bq.get_query_df(line_chat_query)
                        line_chat_df["labeling"] = line_chat_df["message"].apply(lambda s: Keywords.label_to_bq(s, schema_dict))
                        line_chat_df['eventTimeStamp'] = pd.to_datetime(line_chat_df['eventTimeStamp'])
                        line_channel_name = fb.db.reference(f'property/{property_id}/channel/line/{line_id}/name').get()
                        line_chat_df['pageName'] = line_channel_name
                        line_chat_df = line_chat_df.rename(columns={
                            "line_uid" : "user_id"
                        })
                        
                        kw_list = list(fb.db.reference(f'account/{property_id}/keyword').get().keys())
                        priority_map = Keywords.load_group_priorities_deep(fb, property_id, groups=kw_list)
                        line_chat_df["single_grouping"] = line_chat_df["labeling"].apply(
                            lambda row: Keywords.choose_single_group_labels(
                                row,
                                priority_map=priority_map,
                                ascending_priority=True,          # change to False if higher numbers = higher priority
                                limit_to_groups=None,          # or ["purpose"] to emit only purpose
                                drop_excluded=True
                            )
                        )
                        
                        bq.delete_data('customer-360-profile',f'client_{property_id}',"message_labeling_temp")
                        bq.load_data_df(f'client_{property_id}',"message_labeling_temp",line_chat_df)
                        bq.delete_when_match('customer-360-profile',f'client_{property_id}',"message_labeling",
                                            f'client_{property_id}','message_labeling_temp', 
                                            "ON ori.eventTimeStamp = temp.eventTimeStamp AND ori.user_id = temp.user_id AND ori.eventId = temp.eventId AND ori.eventName = temp.eventName ")
                        bq.load_data_df(f'client_{property_id}',"message_labeling",line_chat_df)
        return JSONResponse(status_code=200, content={"status":"ok",'message': 'Message labeling completed successfully', 'data': {}})
    except Exception as e:
        logging.error(f"Error parsing {LOGGING_PREFIX}_{default_request.path_params}: {e}")
        raise HTTPException(500, detail=str(e))