Callbacks work fine on Windows, but on Linux the callbacks do not run in parallel/concurrently.

I have code that pulls data from SQL and displays a progress bar. It generates a pickle file with the user_id in its name; the file contains the current progress value. Meanwhile, a dcc.Interval component fires periodically, triggering a callback that reads that file and updates the progress bar.

So: data_refresh_code => pulls data from SQL in a for loop and generates a pickle file containing the progress value.
upload_metric_cloud => reads the file generated during the SQL pull and updates a dcc.Store value. That store is in turn read by another callback, which updates the progress bar. It is a multi-page app, so I have hidden some of the code.

The code works perfectly on Windows, but when I deploy it as an Azure app it gets stuck: the progress bar does not advance incrementally, and only once the whole dataset has been downloaded from SQL does dcc.Store jump to 100 and the progress bar reach 100.

Here is a basic version of the code:

import numpy as np
import plotly.express as px
import pandas as pd
from dash_extensions.enrich import DashProxy, Output, Input, State, ServersideOutput, html, dcc, \
    ServersideOutputTransform,callback,FileSystemStore,callback_context
import plotly.graph_objects as go
from plotly.subplots import make_subplots
import plotly.io as pio
pio.renderers.default='browser'
from datetime import date, timedelta
import datetime
import numpy as np
import urllib.parse
import urllib
import dash_bootstrap_components as dbc
import sqlalchemy
import base64 
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad,unpad
# import dask.dataframe as dd
# from pandarallel import pandarallel
from dash.exceptions import PreventUpdate
import dash
# Global chart styling: template name, brand colors, and a light-to-dark
# green sequential palette.
selected_chart_template='simple_white'
ai_green="#228779"  # primary accent color (also used for the progress bar)
ai_gray="#bab0ac"
# Copy before reversing: list.reverse() works in place, and reversing
# px.colors.sequential.Greens directly would mutate the shared plotly
# palette for every other module that imports it.
color_sch=list(px.colors.sequential.Greens)
color_sch.reverse()
import pyodbc
import pickle
import os
# a=[x[0] for x in os.walk(os.getcwd())]
# path=a[0]








###########################################################################################
#Gather all call backs
# Page layout fragment: refresh / process buttons, an aggregation dropdown,
# a striped progress bar, and a spinner-wrapped results table.
option_selected = dbc.Container([
        dbc.Row(
            [
                
                dbc.Col(
                    html.Div([
                    html.H6(id="data_refresh"),
                    dbc.Button("Refresh Data",id="refresh_button_init",n_clicks=None,color="primary"),
                    # Fires every 5 s; each tick triggers upload_metric_cloud,
                    # which polls the per-client progress pickle.
                    dcc.Interval(
                    id='interval-component',
                    interval=1*5000, # in milliseconds
                    n_intervals=0
                            )
                    ])
                ),
                
                dbc.Col(
                    html.Div([
                    html.H6(id="data_process"),
                    dbc.Button("Process data",id="process_data",n_clicks=None,color="primary"),
                    ])
                ),
                
                dbc.Col(
                    html.Div([
                    html.H6("AGGREGATION"),
                    dcc.Dropdown(
                    id='aggregation',
                    options=["DAY","MONTH","YEAR"],
                    value="MONTH",
                    optionHeight=60)       
                         ])
                    ),
                
            ]
        ),
        # Progress bar driven by update_metrics: value 0-100, label "N%".
        dbc.Row(dbc.Col(dbc.Progress(label="0%",value=0,color=ai_green, style={"height": "20px","font-size": "25px",\
                                                                               "color": "white"},\
                                     striped =True,hide_label=False,min=0,max=100,id="progress_bar"))),
            
         
        html.Hr(),
        html.Hr(),
        dbc.Spinner(dbc.Row(dbc.Col(id="table_init")))
        
        ],
    # NOTE(review): CONTENT_STYLE is defined in a hidden part of this
    # multi-page app — confirm it is in scope where this module is loaded.
    fluid=True,style=CONTENT_STYLE
)



#####################################################
#database progress updater
@callback(Output('dictionary_client', 'data'),
              Input('interval-component', 'n_intervals'),
              State('user-id','data'),
              prevent_initial_call=True)
def upload_metric_cloud(n, client_id):
    """Poll the per-client progress pickle and publish its value to the
    'dictionary_client' dcc.Store.

    Runs on every dcc.Interval tick. The pickle is written during the SQL
    pull (data_pull_sql, hidden code) and carries the progress value in
    datframe["client_id"][0].

    Raises:
        PreventUpdate: when the file does not exist yet (interval fires
            before the first refresh click, or on a worker that never wrote
            it) or is only partially written. The original crashed with
            FileNotFoundError here — a likely cause of the bar never
            advancing on Linux/Azure.
    """
    path = "clientId___" + str(client_id) + ".pkl"
    try:
        with open(path, 'rb') as f:
            datframe = pickle.load(f)
    except (FileNotFoundError, EOFError, pickle.UnpicklingError):
        # File absent or mid-write (the writer is not atomic): skip this tick
        # and keep the store unchanged.
        raise PreventUpdate
    return {str(client_id): datframe["client_id"][0]}

@callback(Output('progress_bar', 'value'),
          Output("progress_bar", 'label'),
          State('user-id','data'),
          Input('dictionary_client', 'data'),prevent_initial_call=True)
def update_metrics(user_id, client_dict):
    """Mirror the stored progress value onto the progress bar.

    Triggered whenever the 'dictionary_client' store changes. The second
    parameter was previously named ``dict`` (shadowing the builtin); Dash
    passes callback arguments positionally, so the rename is safe.

    Returns:
        tuple: (progress, label) — the numeric bar value and its "N%" label.

    Raises:
        PreventUpdate: when the store is empty or has no entry for this
            user yet (the original raised KeyError here).
    """
    if not client_dict:
        raise PreventUpdate
    progress = client_dict.get(str(user_id))
    if progress is None:
        raise PreventUpdate
    return progress, f"{progress}%"


    
    
    
 ##################################################################################                


#
@callback(
    Output('user-id','data'),
    Input("url","search"),
    State('user-id','data'),prevent_initial_call=True)
def client_id(params,u_id):
    """Extract the encrypted client id from the URL query string and store it.

    Args:
        params: the page URL's ``search`` string (e.g. "?client_id=...").
        u_id: the previously stored client id, used as a fallback.

    Returns:
        str: the client id taken from the URL, or the stored id when the
        URL carries none.
    """
    if params is None:
        return u_id
    # parse_qs handles the query string robustly (multiple params, ordering,
    # percent-decoding) instead of the fragile str.replace('client_id=','')
    # the original used, which returned the raw query when the parameter
    # was absent. NOTE(review): parse_qs also URL-decodes the value — confirm
    # the caller URL-encodes the encrypted id.
    query = urllib.parse.urlparse(params).query
    values = urllib.parse.parse_qs(query).get('client_id')
    if values and values[0]:
        return str(values[0])
    return u_id
    
    
@callback(
    ServersideOutput("initial_whole_data", "data"), 
    Input("refresh_button_init","n_clicks"),
    State('user-id','data'),
    prevent_initial_call=True,
    )
def data_refresh_code(refresh_button_init,client_id):
    """Pull the full dataset from SQL for this client into the serverside store.

    data_pull_sql (hidden code) also writes the per-client progress pickle
    that upload_metric_cloud polls to drive the progress bar.

    Returns:
        The file/dataframe produced by data_pull_sql, or dash.no_update when
        the button has not been clicked.
    """
    if refresh_button_init is None:
        # The original evaluated dash.no_update without `return`, so the
        # callback returned None and wiped the store — fixed here.
        return dash.no_update
    print("@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@@{}@@@@@@@@{}@@@@@@@@@@@@".format(callback_context.triggered_id,refresh_button_init))
    return data_pull_sql(client_id)