from flask import Blueprint, request, Response
from flasgger import swag_from
import logging
import uuid
import pytz
import json
import math
import time
import os

import pandas as pd
from firebase.firebase import Firebase
from collections import Counter
from datetime import datetime, timedelta
from bigquery.bq import BigQuery
from utility.function import Function, Pack
from utility.token import Token
logging.basicConfig(level=logging.INFO)

query = Blueprint('query', __name__, url_prefix='/query')
fb = Firebase(host=os.environ.get("FIREBASE_HOST"))


@query.route('/journey/pre-filter/background', methods=['POST'])
def get_pre_filter_journey_background():
    if request.method == 'POST':
        bq = BigQuery()
        accounts = fb.db.reference().child("account").get(shallow=True)
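        # Refresh the cached list of distinct user properties for every account so the
        # /journey/pre-filter endpoint can answer from Firebase without querying BigQuery.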
        for acc in accounts:
            obj = bq.getDistinctUserProperty(acc)
            fb.db.reference().child(f"account/{acc}/userproperty").set(obj)

        
        return {'status': 'success', 'message': 'Set user property for prefilter done'}, 200

    return {'status': 'error', 'message': 'Method not allowed'}, 405

@query.route('/journey/pre-filter', methods=['GET'])
def get_pre_filter_journey():
    if request.method == 'GET':
        data = request.args
        for req in ['property_id']:
            if req not in data:
                return {'message': f"{req} is required for pre-filter journey query"}, 400
            
        property_id = data.get("property_id", None)
        userproperty = fb.db.reference().child(f"account/{property_id}/userproperty").get()
        return {'status': "odk", "message":"success", "data": userproperty},200
    
    return {'status': 'error', 'message': 'Method not allowed'}, 405

@query.route('/journey', methods=['GET'])
@query.route('/journey/', methods=['GET'])
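# Example request (illustrative values only):
#   GET /query/journey?query_type=facebook&query=1234567890&property_id=my-property&start_date=2025-01-01&end_date=2025-01-31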
def get_journey():
    if request.method == 'GET':
        data = request.args
        for req in ['query_type', 'query', 'property_id', 'start_date', 'end_date']:
            if req not in data:
                return {'message': f"{req} is required for journey query"}, 400

        queryType = data.get("query_type", None)
        query = data.get("query", None)
        property_id = data.get("property_id", None)
        # channel_id = data.get("channel_id", None)
        start_date = data.get("start_date", None)
        end_date = data.get("end_date", None)
        if not queryType or not query:
            return {'message': 'Type and query are required'}, 400
            
        if queryType not in ['facebook', 'line', 'phoneNumber']:
            return {'message': f'{queryType} is invalid query type'}, 400
        
        # Resolve the internal user id for this query.
        # phoneNumber queries use the profile_temp index directly; other channels first check
        # the cached query/profile mapping and fall back to scanning every profile in Firebase.
        if queryType == 'phoneNumber':
            finalUser = fb.db.reference().child(f"account/{property_id}/profile_temp/phoneNumber/{query}").get(shallow=True)
            if not finalUser:
                return {'message': 'No user found for this query'}, 404
        else:        
            userId = fb.db.reference().child(f"account/{property_id}/query/profile/{query}").get(shallow=True)
            finalUser = None
            if not userId:
                profiles = fb.db.reference().child(f"account/{property_id}/profile").get(shallow=True)
                for pro in profiles:
                    userData = fb.db.reference().child(f"account/{property_id}/profile/{pro}").get(shallow=True)
                    if queryType in userData:
                        # Get context data
                        contextData = fb.db.reference().child(f"account/{property_id}/profile/{pro}/{queryType}").get(shallow=True)
                        for ids in contextData:
                            user = fb.db.reference().child(f"account/{property_id}/profile/{pro}/{queryType}/{ids}/id").get()
                            if user == query:
                                finalUser = pro
                                break
                    if finalUser:
                        break

                if not finalUser:
                    return {'message': 'No user found for this query'}, 404

            else:
                finalUser = userId
            
            # Cache user search
            fb.db.reference().child(f"account/{property_id}/query/profile/{query}").set(finalUser)
        
        bq = BigQuery()
        data = bq.getJourneyEvent(property_id, finalUser, start_date, end_date)
        
        grouped_events = []

        prev_event_name = None
        count = 0
        timestamps = []

        mappingEventName = {
            "contact_info_submitted_phone": "Contact Info Submitted",
            "user_message": "User Message Sent",
            "click_messenger_ad": "Click Messenger Ad",
        }
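        # Collapse consecutive identical events into one group (count plus start/end timestamps).
        # An event carrying a referral (an ad click-through payload) closes the current group and
        # is emitted as its own single-event group with the referral and message attached.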

        for event in data:
            event_name = event["eventName"]
            referral = event.get("referral")
            timestamp = event["eventTimeStamp"]

            # If referral is not None → break the group
            if referral is not None:
                if prev_event_name is not None:
                    grouped_events.append({
                        "category":  "a",
                        "text": mappingEventName[prev_event_name],
                        "count": count,
                        "start": min(timestamps).strftime("%Y-%m-%d %H:%M:%S"),
                        "end": max(timestamps).strftime("%Y-%m-%d %H:%M:%S")
                    })
                # Save the referral event as its own group
                grouped_events.append({
                    "category":  "a",
                    "text": mappingEventName[event_name],
                    "count": 1,
                    "message": next((i['value'] for i in event['eventProperty'] if i['key'] == 'message'), None),
                    "referral": referral,
                    "start": timestamp.strftime("%Y-%m-%d %H:%M:%S"),
                    "end": timestamp.strftime("%Y-%m-%d %H:%M:%S")
                })
                prev_event_name = None
                count = 0
                timestamps = []
                continue

            # If same as previous, count up
            if event_name == prev_event_name:
                count += 1
                timestamps.append(timestamp)
            else:
                # Save previous group
                if prev_event_name is not None:
                    grouped_events.append({
                        "category": "a",
                        "text": mappingEventName[prev_event_name],
                        "count": count,
                        "start": min(timestamps).strftime("%Y-%m-%d %H:%M:%S"),
                        "end": max(timestamps).strftime("%Y-%m-%d %H:%M:%S")
                    })
                prev_event_name = event_name
                count = 1
                timestamps = [timestamp]

        # Don't forget to add the last group
        if prev_event_name is not None:
            grouped_events.append({
                "category": "a",
                "text": mappingEventName[prev_event_name],
                "count": count,
                "start": min(timestamps).strftime("%Y-%m-%d %H:%M:%S"),
                "end": max(timestamps).strftime("%Y-%m-%d %H:%M:%S")
            })

        if grouped_events:
            return {'status': 'ok', 'data': grouped_events}, 200

        return {'status': 'error', 'message': 'No events found for this user'}, 404
    
    return {'status': 'error', 'message': 'Method not allowed'}, 405
        
        
@query.route('/venn', methods=['GET'])
def get_venn_data():
    if request.method == 'GET':
        data = request.args
        property_id = data.get("property_id", None)
        if property_id is None:
            return {'message': "property_id is required for venn query"}, 400
        
        # accounts_ref = fb.db.reference(f"account/{property_id}")
        # accounts = accounts_ref.get()
        
        start_date = data.get("start_date", "2025-01-01")
        end_date = data.get("end_date", datetime.now().strftime('%Y-%m-%d'))
        bq = BigQuery()
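        # NOTE: the dataset and table are currently hardcoded ("client_lbc", "customer_profile")
        # rather than derived from property_id.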
        profile_obj = bq.getCustomerProfile("client_lbc","customer_profile",start_date, end_date)
        combination_counter = Counter()
        
        # Process each user entry
        for user in profile_obj:
            channels = set()
            for profile in user.get("user_profile", []):
                channel = profile.get("channel")
                if channel in {"facebook", "line", "phoneNumber"}:
                    channels.add(channel)
            combination_counter[tuple(sorted(channels))] += 1
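        # Example: a user with both a facebook and a line profile increments
        # combination_counter[("facebook", "line")]; a phone-only user increments
        # combination_counter[("phoneNumber",)].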
        
        # Step 2: Generate final output format
        result = [
            {
                "name": "facebook",
                "value": sum(v for k, v in combination_counter.items() if "facebook" in k),
                "only": combination_counter.get(("facebook",), 0)
            },
            {
                "name": "line",
                "value": sum(v for k, v in combination_counter.items() if "line" in k),
                "only": combination_counter.get(("line",), 0)
            },
            {
                "name": "telephone",
                "value": sum(v for k, v in combination_counter.items() if "phoneNumber" in k),
                "only": combination_counter.get(("phoneNumber",), 0)
            },
            {
                "name": "facebook & line",
                "sets": ["facebook", "line"],
                "value": combination_counter.get(("facebook", "line"), 0)
            },
            {
                "name": "facebook & telephone",
                "sets": ["facebook", "telephone"],
                "value": combination_counter.get(("facebook", "phoneNumber"), 0)
            },
            {
                "name": "line & telephone",
                "sets": ["line", "telephone"],
                "value": combination_counter.get(("line", "phoneNumber"), 0)
            },
            {
                "name": "facebook & line & telephone",
                "sets": ["facebook", "line", "telephone"],
                "value": combination_counter.get(("facebook", "line", "phoneNumber"), 0)
            }
        ]

        if result:
            return {'status': 'ok', 'data': result}, 200
        
        return {'status': 'error', 'message': 'No Data Found for this property'}, 404
    
    return {'status': 'error', 'message': 'Method not allowed'}, 405


@query.route('/sankey', methods=['GET'])
def get_sankey():
    if request.method == 'GET':
        data = request.args
        for req in ['property_id', 'start_date', 'end_date', 'look_back_start_date', 'look_back_end_date']:
            if req not in data:
                return {'message': f"{req} is required for path exploration query"}, 400

        property_id = data.get("property_id", None)
        # channel_id = data.get("channel_id", None)
        start_date = data.get("start_date", None)
        end_date = data.get("end_date", None)
        look_back_start_date = data.get("look_back_start_date", None)
        look_back_end_date = data.get("look_back_end_date", None)
        
        import numpy as np
        # plotly.graph_objects is only needed if the commented-out Plotly rendering below is re-enabled
        # import plotly.graph_objects as go

        bq = BigQuery()
        data = bq.getSankeyData(property_id, start_date, end_date, look_back_start_date, look_back_end_date)
        df = pd.DataFrame(data).sort_values(by=['user_pseudo_id','eventTimeStamp'], ascending=[False, False])

        df_user_ref_null = set(df[df['ad_title'].isnull()]['user_pseudo_id'].unique())
        df_user_ref = set(df[df['ad_title'].notnull()]['user_pseudo_id'].unique())
        userNonAds = list(df_user_ref_null - df_user_ref)
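        # Users whose events never carry an ad_title are treated as organic traffic; everyone else
        # is attributed to their most recent ad (rows are sorted by timestamp descending, so
        # drop_duplicates keeps the latest ad per user).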
        # userNonAdsTotal = len(userNonAds)

        #Find last ad
        df_user_with_last_ad = df[['user_pseudo_id', 'ad_title']][(~df['user_pseudo_id'].isin(userNonAds)) & (df['ad_title'].notnull())].drop_duplicates(subset=['user_pseudo_id']).rename(columns={
            "ad_title": "last_ad"
        })

        df = df.merge(df_user_with_last_ad, how='left', on='user_pseudo_id')
        df['ad'] = np.where(df['user_pseudo_id'].isin(userNonAds), 'Organic',df['last_ad'])
        
        # Sankey
        users_who_clicked = set(df[df['eventName'] == 'click_messenger_ad']['user_pseudo_id'])
        users_who_messaged = set(df[df['eventName'] == 'user_message']['user_pseudo_id'])
        users_who_contacted = set(df[df['eventName'] == 'contact_info_submitted_phone']['user_pseudo_id'])

        # --- 3. Define Path-Specific User Sets for Attribution ---
        # Determine the source of each user's journey.
        ad_sourced_messaged_users = users_who_clicked.intersection(users_who_messaged)
        # Restrict to organic users who actually sent a message, mirroring the ad-sourced set above
        organic_sourced_messaged_users = set(df[df['ad'] == 'Organic']['user_pseudo_id']).intersection(users_who_messaged)

        # Trace the journey to the final conversion step.
        ad_sourced_contacted_users = ad_sourced_messaged_users.intersection(users_who_contacted)
        organic_sourced_contacted_users = organic_sourced_messaged_users.intersection(users_who_contacted)

        # --- 4. Build the Links for the Sankey Diagram ---
        links_list = []

        # Link 1: ad_id -> click_messenger_ad
        ad_clicks = df[df['eventName'] == 'click_messenger_ad'].groupby('ad')['user_pseudo_id'].nunique().reset_index()
        for _, row in ad_clicks.iterrows():
            links_list.append({'source': row['ad'], 'target': 'Click Messenger Ads', 'value': row['user_pseudo_id']})

        # Link 2: click_messenger_ad -> user_message (from Ads)
        if len(ad_sourced_messaged_users) > 0:
            links_list.append({'source': 'Click Messenger Ads', 'target': 'User Message (from Ads)', 'value': len(ad_sourced_messaged_users)})

        # Link 3: Organic -> user_message (from Organic)
        if len(organic_sourced_messaged_users) > 0:
            links_list.append({'source': 'Organic', 'target': 'User message (from Organic)', 'value': len(organic_sourced_messaged_users)})

        # Link 4: user_message (from Ads) -> contact_info_submitted_phone
        if len(ad_sourced_contacted_users) > 0:
            links_list.append({'source': 'User Message (from Ads)', 'target': 'Phone Contact', 'value': len(ad_sourced_contacted_users)})

        # Link 5: user_message (from Organic) -> contact_info_submitted_phone
        if len(organic_sourced_contacted_users) > 0:
            links_list.append({'source': 'User message (from Organic)', 'target': 'Phone Contact', 'value': len(organic_sourced_contacted_users)})

        # --- 5. Calculate Drop-offs and Add Exit Links ---
        # Drop-off 1: From click to message
        click_dropoff = len(users_who_clicked) - len(ad_sourced_messaged_users)
        if click_dropoff > 0:
            links_list.append({'source': 'Click Messenger Ads', 'target': 'Exit', 'value': click_dropoff})

        # Drop-off 2: From ad-sourced message to contact
        ad_message_dropoff = len(ad_sourced_messaged_users) - len(ad_sourced_contacted_users)
        if ad_message_dropoff > 0:
            links_list.append({'source': 'User Message (from Ads)', 'target': 'Exit', 'value': ad_message_dropoff})

        # Drop-off 3: From organic-sourced message to contact
        organic_message_dropoff = len(organic_sourced_messaged_users) - len(organic_sourced_contacted_users)
        if organic_message_dropoff > 0:
            links_list.append({'source': 'User message (from Organic)', 'target': 'Exit', 'value': organic_message_dropoff})

        # Create the final links DataFrame
        links = pd.DataFrame(links_list)

        # --- 6. Define Nodes and Build the Diagram ---
        all_nodes = pd.concat([links['source'], links['target']]).unique().tolist()
        node_map = {node: i for i, node in enumerate(all_nodes)}

        inflow_totals = links.groupby('target')['value'].sum()
        outflow_totals = links.groupby('source')['value'].sum()
        node_totals = inflow_totals.copy()
        # Identify start nodes (sources that are never targets) and use their outflow as their total value
        start_nodes = set(outflow_totals.index) - set(inflow_totals.index)
        for node in start_nodes:
            node_totals[node] = outflow_totals[node]


        # Add custom data for tooltips: calculate percentage of users that proceed from the source node
        links['percent_of_source'] = links.apply(lambda row: (row['value'] / node_totals[row['source']]) * 100 if row['source'] in node_totals and node_totals[row['source']] > 0 else 0, axis=1)

        # sankey_sources = links['source'].map(node_map)
        # sankey_targets = links['target'].map(node_map)
        # sankey_values = links['value']

        # # --- 7. Build and Display the Sankey Diagram ---
        # fig = go.Figure(data=[go.Sankey(
        #     arrangement='snap',
        #     node=dict(
        #         pad=25,
        #         thickness=20,
        #         line=dict(color="black", width=1),
        #         label=all_nodes,
        #         hovertemplate='Stage: %{label}<br>Total Users: %{value}<extra></extra>',
        #         hoverlabel=dict(
        #             bgcolor="#374151",
        #             font=dict(
        #                 family="Roboto, sans-serif",
        #                 size=12,
        #                 color="white"
        #             ),
        #             bordercolor="#4B5563"
        #         ),
        #         # A more modern color palette for dark mode
        #         color=['#836FFF' if 'ad_' in str(node) or 'Organic' in str(node) else '#F0F3FF' if 'submit' in str(node) else '#211951' if 'Exit' in str(node) else '#15F5BA' for node in all_nodes]
        #     ),
        #     link=dict(
        #         source=sankey_sources,
        #         target=sankey_targets,
        #         value=sankey_values,
        #         # Lighter link color for dark background
        #         hovertemplate='Path: %{source.label} → %{target.label}<br>' +
        #                     'Users: <b>%{value}</b><br>' +
        #                     'Conversion Rate: <b>%{customdata:.2f}%</b> of %{source.label}<extra></extra>',
        #         customdata=links['percent_of_source'],
        #         hoverlabel=dict(
        #             bgcolor="#374151",
        #             font=dict(family="Arial, sans-serif", size=12, color="white"),
        #             bordercolor="#4B5563"
        #         ),
        #         color='rgba(156, 163, 175, 0.8)'
        #     )
        # )])

        # fig.update_layout(
        #     # title_text="User Journey: Funnel exploration",
        #     # title_x=0.5,
        #     # Dark mode theme
        #     font=dict(
        #         family="Roboto, sans-serif", # Custom font
        #         size=12,
        #         color="#15F5BA"
        #     ),
        #     paper_bgcolor="rgba(0, 0, 0, 0)",
        #     plot_bgcolor="rgba(0, 0, 0, 0)",
        #     width=1280 ,
        #     height=620
        # )

        # sankeyHTML = fig.to_html(full_html=True, include_plotlyjs='cdn')
        # return Response(sankeyHTML, mimetype='text/html')

        # Return data
        links['typeLabel'] = np.where(links['source'].str.contains("Ads") | links['target'].str.contains("Ads"), "Ads", "Organic")
        nodeWeight = links.pivot_table(index="source", values='value', aggfunc='sum').reset_index()
        
        l = links.rename(columns={
            "source": "from",
            "target": "to"
        }).to_json(orient='records')

        n = nodeWeight.rename(columns={
            "source": "from"
        }).to_json(orient='records')

        return {'status': "success", "message": "Done", "data": {
            "link": json.loads(l),
            "node": json.loads(n)
        }}, 200
    
    return {'status': 'error', 'message': 'Method not allowed'}, 405

@query.route('/overview', methods=['GET'])
def get_overview_data():
    if request.method == 'GET':
        data = request.args
        for req in ['property_id', 'start_date', 'end_date']:
            if req not in data:
                return {'message': f"{req} is required for overview query"}, 400
    
        property_id = data.get("property_id", None)
        start_date = data.get("start_date", None)
        end_date = data.get("end_date", None)

        bq = BigQuery()
        dataAll = bq.getOverviewDashboard(property_id, start_date, end_date)
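        # dataAll is expected to contain one row per event with at least: user_pseudo_id, eventId,
        # eventName, eventTimeStamp, ad_id and message (all of these columns are used below).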
        df = pd.DataFrame(dataAll)

        if df.empty: 
            return {"status": "Empty data", "message": "Do not have data with this query"}, 404
        
        df['date'] = pd.to_datetime(df['eventTimeStamp']).dt.strftime("%Y-%m-%d")

        # Total Users
        totalUsers = len(df['user_pseudo_id'].unique())
            #series
        # totalUsersSeries = list(df.pivot_table(index="date", values="user_pseudo_id", aggfunc='nunique').reset_index()['user_pseudo_id'])

        # Total Events
        totalEvent = len(df['eventId'].unique())
            #series
        # totalEventSeries = list(df.pivot_table(index="date", values="eventId", aggfunc='nunique').reset_index()['eventId'])


        # Event user traffic
        line1 = df.pivot_table(index="date", values=["eventId", "user_pseudo_id"], aggfunc='nunique').reset_index()
        ## series users
        line1_eventSeries = list(line1['eventId'])
        line1_userSeries = list(line1['user_pseudo_id'])
        line1_xaxis = list(line1['date'])

        # Cal message
        dfMessage = df[df['eventName'].isin(['user_message', 'contact_info_submitted_phone'])]
        totalMessage = len(dfMessage['eventId'].unique())
        line2 = dfMessage.pivot_table(index="date", values=["eventId"], aggfunc='nunique').reset_index()
        line2_MessageSeries = list(line2['eventId'])
        messageList = list(dfMessage.sort_values(by=['user_pseudo_id', 'eventTimeStamp'], ascending=[False, False]).drop_duplicates(subset=['user_pseudo_id']).head(10)['message'])

        #Add engagement
        df_Ads = df[df['ad_id'].notnull()]
        df_AdsSeries = df_Ads.pivot_table(index=['date'], columns=['eventName'], values=["eventId"], aggfunc='nunique').reset_index()
        df_AdsSeries.columns = [col[1] if col[1] else col[0] for col in df_AdsSeries.columns]
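        # Assumes both 'click_messenger_ad' and 'user_message' occur in the selected window;
        # if either column is missing after the pivot, the lookups below raise a KeyError.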
        ads_Click = df_AdsSeries['click_messenger_ad'].to_list()
        ads_Message = df_AdsSeries['user_message'].to_list()
        
        packData = Pack.packOverviewDashboard(totalUsers, line1_userSeries, totalEvent, line1_eventSeries, line1_xaxis, totalMessage, line2_MessageSeries, messageList, ads_Click, ads_Message)
        returnObj = {"data" : packData}
        returnObj['status'] = "success"
        returnObj['message'] = "Done"
        return returnObj, 200

    return {'status': 'error', 'message': 'Method not allowed'}, 405
    
@query.route('/profile', methods=['GET'])   
def get_profile():
    if request.method == 'GET':
        data = request.args
        for req in ['property_id']:
            if req not in data:
                return {'message': f"{req} is required for getting profile"}, 400

        property_id = data.get("property_id", None)
        limit = data.get("limit", 50)
        
        bq = BigQuery()
        existing_channel = bq.get_all_channel_property(property_id)
        existing_channel_str = ", ".join(f"'{c}'" for c in existing_channel)
        profile = bq.get_profile_by_property_id(property_id, existing_channel_str, limit)
        
        return {'status': 'ok', 'data': profile}, 200
    return {'status': 'error', 'message': 'Method not allowed'}, 405

@query.route('/journey/all-user', methods=['GET'])
@swag_from('/app/swagger_docs/journey_all_user.yml')
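# Example request (illustrative values only):
#   GET /query/journey/all-user?property_id=my-property&date_start=2025-01-01&date_end=2025-01-07&page=1&sort=Last%20Update.desc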
def journey_all_user_data():
    if request.method == 'GET':
        try:
        
            data = request.args
            for req in ['property_id', 'date_start', 'date_end', 'page']:
                if req not in data:
                    return {'message': f"{req} is required"}, 400
            
            is_clear_cache = Function.to_bool(data.get('is_clear_cache', False))
            sort = data.get('sort', 'Last Update.desc')
            property_id = data.get("property_id", None)
            date_start = data.get("date_start", (datetime.now() - timedelta(days=7)).strftime("%Y-%m-%d"))
            date_end = data.get("date_end", datetime.now().strftime("%Y-%m-%d"))
            page = data.get("page", "1")
            page_number = int(page)
            
            start_total = time.time()
            
            from query.genQuery import GenerateQuery
            genq = GenerateQuery()
            
            #Generate query
            query_text = f"journey_all_user,{property_id},{date_start},{date_end}"
            orderByAllowedFields = ["ID", "Last Update", "Facebook Engagement", "Facebook Ads", 
                                    "Messenger Chat", "LINE Chat", "Website", "Sales"]
            start_query = time.time()
            
            query = genq.getEvent(property_id, date_start, date_end)
            
            end_query = time.time()
            query_time = end_query - start_query
            
            # query_id = genq.generate_event_id(f"{query}_{sort}")

            #Check cache
            import numpy as np
            bq = BigQuery()
            df = bq.get_query_df(query)
            
            start_prep1 = time.time()
            
            #Prep phase
            # df = pd.DataFrame(data)
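            # Map raw events to the display labels used as table columns:
            # Facebook engagement, Messenger Chat, LINE Chat and Facebook Ads.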
            
            df.loc[(df['eventName'].str.contains('_add', na=False)) & 
                (df['source'].str.contains('facebook', na=False)), 'eventLabel'] = 'Facebook engagement'

            df.loc[(df['eventName'] == 'user_message') & 
                (df['source'].str.contains('facebook', na=False)), 'eventLabel'] = 'Messenger Chat'

            df.loc[(df['eventName'] == 'user_message') & 
                (df['source'].str.contains('line', na=False)), 'eventLabel'] = 'LINE Chat'

            df.loc[(df['eventName'] == 'click_messenger_ad') & 
                (df['source'].str.contains('facebook', na=False)), 'eventLabel'] = 'Facebook Ads'
            
            end_prep1 = time.time()
            prepq1_time = end_prep1 - start_prep1
            
            start_prep2 = time.time()

            drop_lead_users = df.loc[df['eventName'] == 'contact_info_submitted_phone', 'user_pseudo_id'].unique()
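            # Label users who ever submitted a phone number as 'Drop leads' (everyone else stays
            # 'Anonymous'), then count events per user for each display label.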
            df['userStatus'] = np.where(df['user_pseudo_id'].isin(drop_lead_users), 'Drop leads', 'Anonymous')
            df_pivot = df.groupby(['user_pseudo_id', 'userStatus', 'eventLabel']).size().unstack(fill_value=0)
            
            end_prep2 = time.time()
            prepq2_time = end_prep2 - start_prep2
            
            start_prep3 = time.time()
            # Convert only numeric columns to int (i.e. count columns)
            for col in df_pivot.select_dtypes(include='number').columns:
                df_pivot[col] = df_pivot[col].astype(int)

            df_pivot = df_pivot.reset_index()
            
            df_last_event = df.groupby('user_pseudo_id')['eventTimeStamp'].max().reset_index()

            final_df = df_pivot.merge(df_last_event,on='user_pseudo_id',how='inner')
            final_df.rename(
                columns={
                    "userStatus" : "User Status",
                    "user_pseudo_id": "ID",
                    "eventTimeStamp": "Last Update"
                },
                inplace=True
            )
            
            end_prep3 = time.time()
            prepq3_time = end_prep3 - start_prep3

            start_prep4 = time.time()
            
            sorted_df = Function.apply_sort(final_df, sort)
            sorted_df.reset_index(drop=True,inplace=True)
            total_rows = sorted_df.shape[0]
            rows_per_page = 50
            total_rows_up = math.ceil(total_rows / rows_per_page)
            
            end_prep4 = time.time()
            prep4_time = end_prep4 - start_prep4


            start_prep5 = time.time()
            start_row = (page_number - 1) * rows_per_page
            end_row = page_number * rows_per_page
            end_prep5 = time.time()
            prep5_time = end_prep5 - start_prep5

            page_df = sorted_df.iloc[start_row:end_row]
            sorted_json = page_df.to_dict(orient='records')
            
            # Add time performance log
            logging.info(f"Q1:{query_time}, P1:{prepq1_time}, P2:{prepq2_time}, P3:{prepq3_time}, P4:{prep4_time}, P5:{prep5_time}")
            
            return {'status': "ok", 'data': sorted_json, 'total_page': total_rows_up}, 200
        
        except Exception as e:
            return {'status': 'error', 'message': f"{e}"}, 500
    
    return {'status': 'error', 'message': 'Method not allowed'}, 405
            


