How to display progress in server-side callbacks (dash-extensions)

I am retrieving data from my database server. The data contains approximately 3 million rows, and I am using dash-extensions to keep this large dataset in session storage. Now I want to display a progress bar while the data is being retrieved. With dash.long_callback it is possible to use a progress component to show a progress bar, but how can I integrate long callbacks with dash-extensions? The dash-extensions documentation says it does not support long callbacks. Is there any workaround? I am stuck.
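For context, this is roughly how I store the query result with a normal dash-extensions callback today (just a sketch based on the ServersideOutput examples in the dash-extensions docs; the component ids and the pickle path are made up). It is this pattern that I cannot combine with a progress bar:

from dash import html, dcc
from dash_extensions.enrich import DashProxy, ServersideOutput, ServersideOutputTransform, Input
import pandas as pd

app = DashProxy(transforms=[ServersideOutputTransform()])

app.layout = html.Div(
    [
        html.Button("Run query", id="run_query"),
        dcc.Store(id="query_store"),  # result lives in server-side session storage
    ]
)

@app.callback(
    ServersideOutput("query_store", "data"),
    Input("run_query", "n_clicks"),
    prevent_initial_call=True,
)
def run_query(n_clicks):
    df = pd.read_pickle("high_file.pkl")  # placeholder for the real SQL query
    return df  # only a reference is sent to the browser, the DataFrame stays on the server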

This is my long callback function for the SQL retrieval progress. I copied it from the Dash website with slight modifications.

import time
import dash
from dash import html
from dash.long_callback import DiskcacheLongCallbackManager
from dash.dependencies import Input, Output
import pandas as pd
import pyodbc
## Diskcache
import diskcache
cache = diskcache.Cache("./cache")
long_callback_manager = DiskcacheLongCallbackManager(cache)

app = dash.Dash(__name__, long_callback_manager=long_callback_manager)

app.layout = html.Div(
    [
        html.Div(
            [
                html.P(id="paragraph_id", children=["Button not clicked"]),
                html.Progress(id="progress_bar", value="0"),
            ]
        ),
        html.Button(id="button_id", children="Run Job!"),
        html.Button(id="cancel_button_id", children="Cancel Running Job!"),
    ]
)


@app.long_callback(
    output=Output("paragraph_id", "children"),
    inputs=Input("button_id", "n_clicks"),
    progress=[Output("progress_bar", "value"), Output("progress_bar", "max")],
    # Assumption: the cancel button in the layout is meant to cancel the running job.
    cancel=[Input("cancel_button_id", "n_clicks")],
    prevent_initial_call=True,
)
def database_extractor(set_progress, n_clicks):
    client_id = 100
    server = '----------'
    database = '-----------'
    username = '----------'
    password = '------------'
    driver = '{ODBC Driver 13 for SQL Server}'

    db = pd.DataFrame()
    with pyodbc.connect('DRIVER=' + driver + ';SERVER=tcp:' + server + ';PORT=1433;DATABASE=' + database + ';UID=' + username + ';PWD=' + password) as conn:
        with conn.cursor() as cursor:
            # Total row count of the table, used to size the progress bar.
            cursor.execute(
                "SELECT Total_Rows = SUM(st.row_count) FROM sys.dm_db_partition_stats st "
                "WHERE object_name(object_id) = 'SalesAggregates' AND (index_id < 2)"
            )
            total_rows = cursor.fetchone()[0]
            chunksize = 100000
            # Ceiling division so the final partial chunk is not dropped.
            tot = -(-total_rows // chunksize)

            cursor.execute(
                "SELECT Date, CustomerID, product_type, quantity, amount, price, product_id, variant_id, sku"
                ", variant_title, revenue, total_products, sum_qty, sum_dollar_value, avg_selling_price, count_orders, unique_customer"
                ", revenue_id, total_products_id, sum_qty_id, sum_dollar_value_id, avg_selling_price_id, count_orders_id, unique_customer_id"
                ", revenue_var, total_products_var, sum_qty_var, sum_dollar_value_var, avg_selling_price_var, count_orders_var, unique_customer_var"
                ", unique_sku, sum_qtys, avg_product_per_customer"
                f" FROM SalesAggregates WHERE cid = {client_id} AND quantity IS NOT NULL"
            )

            # Fetch in chunks, append to the DataFrame, and report progress after each chunk.
            for i in range(tot):
                rows = cursor.fetchmany(chunksize)
                print("rows: {} --- chunk: {}".format(len(rows), i + 1))
                df = pd.DataFrame.from_records(rows, columns=[col[0] for col in cursor.description])
                db = pd.concat([db, df])
                set_progress((str(i + 1), str(tot)))

            db.to_pickle(r"high_file.pkl")
            return "done"
        
if __name__ == "__main__":
    app.run_server(debug=True,port=8054)
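The only workaround I can think of is to keep the long callback exactly as above (it only writes high_file.pkl and returns "done"), and then chain a regular dash-extensions callback that loads the pickle into a ServersideOutput store once the job has finished. A rough sketch of what I mean (assuming app is a DashProxy with ServersideOutputTransform and the layout contains dcc.Store(id="session_store"); I have not tested this):

from dash.exceptions import PreventUpdate
from dash_extensions.enrich import ServersideOutput, Input
import pandas as pd

# Regular (non-long) callback: fires when the long callback sets "paragraph_id"
# to "done", then moves the pickled result into server-side session storage.
@app.callback(
    ServersideOutput("session_store", "data"),
    Input("paragraph_id", "children"),
    prevent_initial_call=True,
)
def load_result(status):
    if status != "done":
        raise PreventUpdate
    return pd.read_pickle("high_file.pkl")

Is this kind of chaining a reasonable workaround, or is there a cleaner way to get long_callback progress and dash-extensions server-side storage working together?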