Dash Ag Grid Refresh Cache and Purge Cache

Hello! The Infinite model has options such as Refresh Cache and Purge Cache. Example: React example

Please tell me how to use these two options in Dash Ag Grid. I am updating the pandas data, but the cached row data is not updating in the grid.

Hello @sergeyvyazov,

When posting links for AG-grid, please link to their docs and examples that show the code. It makes it a lot easier to determine how they did it.

With that said, you can find it here:

Now, currently there is no way to work with the cache because we don’t have endpoints designed for that.

However, we just got the go ahead on this issue:

This would give direct support to being able to access these apis via a clientside callback.

We may have this be opened as some way to send as a prop, but not entirely sure yet.

@jinnyzor Thanks for the comment. I will follow the updates in the above topic.

1 Like

Hello @jinnyzor! The Dash Ag Grid update was recently released, which introduced the functionality that we recently talked about. Please tell me if it is now possible to implement Refresh Cache and Purge Cache in the grid.

Hey @sergeyvyazov,

Sure is. :slight_smile:

You can also add event listeners if you so desire.

1 Like

I need refreshInfiniteCache() to happen automatically. Did I paste the code correctly?

import dash_ag_grid as dag
from dash import Dash, Input, Output, dcc, html, callback
import pandas as pd


app = Dash(__name__)

# Synthetic 10,000-row dataset: sequential ids paired with a short,
# deterministic "name" string derived from each id. Built directly with
# comprehensions instead of the original append-in-loop.
raw_data = {
    "id": list(range(10000)),
    "name": [f"{i * 3 % 5}-{i * 7 % 15}-{i % 8}" for i in range(10000)],
}

df = pd.DataFrame(data=raw_data)

# Page layout: a single grid using the infinite row model, which requests
# row blocks from the server via the getRowsRequest/getRowsResponse props.
app.layout = html.Div(
    [
        dcc.Markdown("Infinite scroll with sort and filter"),
        dag.AgGrid(
            id="infinite-sort-filter-grid",
            columnSize="sizeToFit",
            columnDefs=[
                {"field": "id", "filter": "agNumberColumnFilter"},
                {"field": "name"},
            ],
            # Every column is sortable/filterable unless overridden in columnDefs.
            defaultColDef={"sortable": True, "filter": True, "floatingFilter": True},
            # "infinite" delegates row fetching to the server-side callback.
            rowModelType="infinite",
            dashGridOptions={
                # The number of rows rendered outside the viewable area the grid renders.
                "rowBuffer": 0,
                # How many blocks to keep in the store. Default is no limit, so every requested block is kept.
                "maxBlocksInCache": 1,
                "rowSelection": "multiple",
            },
        ),
    ],
    style={"margin": 20},
)

# Maps AG Grid comparison filter types to the matching pandas Series method
# name (e.g. "greaterThan" -> Series.gt). Used as the dispatch fallback in
# filterDf for filter types not handled explicitly.
operators = {
    "greaterThanOrEqual": "ge",
    "lessThanOrEqual": "le",
    "lessThan": "lt",
    "greaterThan": "gt",
    "notEqual": "ne",
    "equals": "eq",
}


def filterDf(df, data, col):
    """Apply one AG Grid filter-model condition to ``df`` and return the result.

    Parameters
    ----------
    df : pandas.DataFrame
        Frame to filter (a new filtered frame is returned; the input is not
        modified in place).
    data : dict
        A single AG Grid filter-model condition, e.g.
        ``{"filterType": "number", "type": "greaterThan", "filter": 2}``.
        Range conditions additionally carry ``filterTo`` / ``dateTo``.
    col : str
        Column name the condition applies to.

    Returns
    -------
    pandas.DataFrame
        Rows of ``df`` matching the condition.
    """
    # Coerce the filter value(s) to the target column's dtype by round-tripping
    # through a throwaway Series. crit2 is initialized to None so an "inRange"
    # condition missing its upper bound raises a clear comparison error instead
    # of an UnboundLocalError.
    crit2 = None
    if data["filterType"] == "date":
        crit1 = pd.Series(data["dateFrom"]).astype(df[col].dtype)[0]
        if "dateTo" in data:
            crit2 = pd.Series(data["dateTo"]).astype(df[col].dtype)[0]
    else:
        crit1 = pd.Series(data["filter"]).astype(df[col].dtype)[0]
        if "filterTo" in data:
            crit2 = pd.Series(data["filterTo"]).astype(df[col].dtype)[0]

    ftype = data["type"]
    if ftype == "contains":
        df = df.loc[df[col].str.contains(crit1)]
    elif ftype == "notContains":
        df = df.loc[~df[col].str.contains(crit1)]
    elif ftype == "startsWith":
        df = df.loc[df[col].str.startswith(crit1)]
    elif ftype == "notStartsWith":
        df = df.loc[~df[col].str.startswith(crit1)]
    elif ftype == "endsWith":
        df = df.loc[df[col].str.endswith(crit1)]
    elif ftype == "notEndsWith":
        df = df.loc[~df[col].str.endswith(crit1)]
    elif ftype == "inRange":
        if data["filterType"] == "date":
            # NOTE(review): between_time selects by time-of-day, not calendar
            # date range -- presumably a date range was intended; confirm.
            df = df.loc[df[col].astype("datetime64[ns]").between_time(crit1, crit2)]
        else:
            df = df.loc[df[col].between(crit1, crit2)]
    elif ftype == "blank":
        df = df.loc[df[col].isnull()]
    elif ftype == "notBlank":
        df = df.loc[df[col].notnull()]
    else:
        # Comparison filters ("equals", "lessThan", ...) dispatch through the
        # operators table to the matching pandas Series method.
        df = df.loc[getattr(df[col], operators[ftype])(crit1)]
    return df


@callback(
    Output("infinite-sort-filter-grid", "getRowsResponse"),
    Input("infinite-sort-filter-grid", "getRowsRequest"),
)
def infinite_scroll(request):
    """Serve one block of rows for the grid's infinite row model.

    Applies the request's filter model (via ``filterDf``) and sort model to a
    copy of the module-level ``df``, then returns the ``startRow``/``endRow``
    slice along with the total count of matching rows.
    """
    dff = df.copy()

    if request:
        if request["filterModel"]:
            fils = request["filterModel"]
            for k in fils:
                try:
                    if "operator" in fils[k]:
                        if fils[k]["operator"] == "AND":
                            # AND: apply both conditions in sequence.
                            dff = filterDf(dff, fils[k]["condition1"], k)
                            dff = filterDf(dff, fils[k]["condition2"], k)
                        else:
                            # OR: union of the two independently filtered frames.
                            dff1 = filterDf(dff, fils[k]["condition1"], k)
                            dff2 = filterDf(dff, fils[k]["condition2"], k)
                            dff = pd.concat([dff1, dff2])
                    else:
                        dff = filterDf(dff, fils[k], k)
                except Exception:
                    # Best effort: a malformed condition leaves this column
                    # unfiltered instead of failing the whole response.
                    pass

        if request["sortModel"]:
            sorting = [s["colId"] for s in request["sortModel"]]
            ascending = [s["sort"] == "asc" for s in request["sortModel"]]
            dff = dff.sort_values(by=sorting, ascending=ascending)

        # Report at least one row (mirrors the original behavior; presumably
        # avoids an empty-grid quirk -- confirm against dash-ag-grid docs).
        lines = max(len(dff.index), 1)

        partial = dff.iloc[request["startRow"] : request["endRow"]]
        return {"rowData": partial.to_dict("records"), "rowCount": lines}

# NOTE(review): this chains getRowsResponse -> refreshInfiniteCache -> new
# getRowsRequest; once multiple blocks are cached this risks an endless
# request/refresh loop. Triggering the refresh from a button is safer.
app.clientside_callback(
    """function () {
        dash_ag_grid.getApi('infinite-sort-filter-grid').refreshInfiniteCache()
        return dash_clientside.no_update
    }""",
    Output("infinite-sort-filter-grid", "getRowsResponse"),
    Input("infinite-sort-filter-grid", "getRowsResponse"),
    prevent_initial_call=True
)


if __name__ == "__main__":
    app.run(debug=True)

Each request you want it to destroy the cache?

Rather yes. Since I will be updating the original df very often

To update an already cached block with new data

Or if it’s more correct at the click of a button, then it’s true that I wrote the function itself and Input Output?

I don’t know how the process of the request vs response, but I’d fear that each time you do this, once you have multiple caches, it would end up in an endless loop.

It’d be better to hook it up to a button push or something similar.

It’s clear. If I do as you suggested and connect to the button click, what should be the Output? Did I write the code correctly? Tell me please

# Refresh every cached block when the button is clicked.
app.clientside_callback(
    """function (n) {
        dash_ag_grid.getApi('infinite-sort-filter-grid').refreshInfiniteCache()
        return dash_clientside.no_update
    }""",
    # Output back to the button itself: "getRowsResponse" is already owned by
    # the server-side row callback, and duplicating that output would error.
    Output("button", "n_clicks"),
    # Dash's button click-count property is "n_clicks" (underscore);
    # "n-clicks" does not exist, so the callback would never fire.
    Input("button", "n_clicks"),
    prevent_initial_call=True
)

I think, in essence, it should work. I'd just use the button as the output, since it then wouldn't require a duplicate output on the getRowsResponse.

Thank you. I will try to implement

Good afternoon! I used your answer (example at the link). Implemented. And purge cache works. But does not update all cached blocks above. Updates only the last loaded block. Tell me how to fix this? It is necessary that the previous block is also updated. I will attach an image of what the process looks like if it stops at the junction of two blocks

from dash import Dash, html, Input, Output, no_update, State, ctx
from dash_ag_grid import AgGrid
import dash_mantine_components as dmc
import pandas as pd


# Sample dataset: Olympic winners CSV fetched from Plotly's public datasets
# repository (requires network access at startup).
df = pd.read_csv(
        "https://raw.githubusercontent.com/plotly/datasets/master/ag-grid/olympic-winners.csv"
    )
app = Dash()

# basic columns definition with column defaults
columnDefs = [{"field": c} for c in df.columns]

# Page layout: an infinite-model grid, a chip group acting as a country
# filter, and a button that fires the cache action.
app.layout = html.Div(
    [
        AgGrid(
            id="grid",
            columnDefs=columnDefs,
            defaultColDef={"resizable": True, "sortable": True, "filter": True},
            # Rows are fetched in blocks on demand via getRowsRequest.
            rowModelType="infinite",
            dashGridOptions={
                # "pagination": True
            },
            style={"height": 900, "width": "100%"},
        ),
        # Country selector; read as State by the row-request callback below.
        dmc.ChipGroup(
            [dmc.Chip(x, value=x) for x in ["United States", "Afghanistan"]],
            value="United States",
            id="filters",
        ),
        html.Button(id="fire-filters", children="Fire Filters"),
    ]
)


@app.callback(
    Output("grid", "getRowsResponse"),
    Input("grid", "getRowsRequest"),
    State("filters", "value"),
)
def update_grid(request, filters_data):
    """Serve one infinite-scroll block, filtered to the selected country.

    Returns the ``startRow``/``endRow`` slice of the filtered frame plus the
    total matching row count. Explicitly returns ``no_update`` before the
    grid's first request (an implicit ``None`` would overwrite the prop).
    """
    if not request:
        return no_update

    filtered_df = df[df['country'] == filters_data]

    partial = filtered_df.iloc[request["startRow"]: request["endRow"]]
    return {"rowData": partial.to_dict("records"), "rowCount": len(filtered_df.index)}

# NOTE(review): purgeInfiniteCache only discards the cached blocks; rows are
# re-requested lazily as they scroll back into view, so blocks above the
# viewport keep stale data until revisited. refreshInfiniteCache re-fetches
# every cached block immediately.
app.clientside_callback(
    """function (n) {
        dash_ag_grid.getApi('grid').purgeInfiniteCache()
        return dash_clientside.no_update
    }""",
    # Output back to the button avoids duplicating the getRowsResponse output
    # already owned by update_grid.
    Output("fire-filters", "n_clicks"),
    Input("fire-filters", "n_clicks"),
    prevent_initial_call=True
)


if __name__ == '__main__':
    app.run(debug=True)

You are looking for refreshInfiniteCache; purgeInfiniteCache is used to destroy any caching and will not request anything new unless scrolled to.

refreshInfiniteCache will spam the server with all the cached rows:

image

from dash import Dash, html, Input, Output, no_update, State, ctx
from dash_ag_grid import AgGrid
import dash_mantine_components as dmc
import pandas as pd


# Load the Olympic-winners demo CSV from Plotly's datasets repo (network
# fetch at startup) and derive one column definition per CSV column.
df = pd.read_csv(
        "https://raw.githubusercontent.com/plotly/datasets/master/ag-grid/olympic-winners.csv"
    )
app = Dash()

# basic columns definition with column defaults
columnDefs = [{"field": c} for c in df.columns]

# Layout: infinite-model grid plus a country chip filter and a button that
# triggers the clientside cache refresh.
app.layout = html.Div(
    [
        AgGrid(
            id="grid",
            columnDefs=columnDefs,
            defaultColDef={"resizable": True, "sortable": True, "filter": True},
            # Infinite row model: data arrives block-by-block from the server.
            rowModelType="infinite",
            dashGridOptions={
                # "pagination": True
            },
            style={"height": 900, "width": "100%"},
        ),
        # The selected chip value is read as State when answering row requests.
        dmc.ChipGroup(
            [dmc.Chip(x, value=x) for x in ["United States", "Afghanistan"]],
            value="United States",
            id="filters",
        ),
        html.Button(id="fire-filters", children="Fire Filters"),
    ]
)


@app.callback(
    Output("grid", "getRowsResponse"),
    Input("grid", "getRowsRequest"),
    State("filters", "value"),
)
def update_grid(request, filters_data):
    """Answer the grid's row request with the slice matching the chosen country."""
    if not request:
        return

    by_country = df[df['country'] == filters_data]
    start, end = request["startRow"], request["endRow"]
    return {
        "rowData": by_country.iloc[start:end].to_dict("records"),
        "rowCount": len(by_country.index),
    }

# refreshInfiniteCache re-requests every block currently held in the cache,
# so rows that are already loaded get updated in place (unlike
# purgeInfiniteCache, which only drops the cache).
app.clientside_callback(
    """function (n) {
        dash_ag_grid.getApi('grid').refreshInfiniteCache()
        return dash_clientside.no_update
    }""",
    # Outputting to the button keeps getRowsResponse free for update_grid.
    Output("fire-filters", "n_clicks"),
    Input("fire-filters", "n_clicks"),
    prevent_initial_call=True
)


if __name__ == '__main__':
    app.run(debug=True)

We had used purgeInfiniteCache in the other example because there was pagination involved, which meant that we wanted to reset all the caches and request them anew, because we weren't trying to hold our place in the pages.

Thank you very much! Figured it out at last!

1 Like