Hi everyone.
I’m displaying real-time data with a Dash DataTable. The data comes from a Pulsar topic.
The flow is as follows:
I have a dcc.Interval timer whose n_intervals triggers a callback.
On each tick, I consume new data and redraw the DataTables.
My problem is that old rows are still shown in the table even when the new data does not contain them, so the table ends up mixed with old and new data.
How can I make sure that when I display the new data, the old rows are removed?
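For context, the layout side looks roughly like this (a minimal sketch; the component IDs match the callback in the edit below, while the interval value, the dcc.Store for the connection status, and the rest are simplified placeholders):

from dash import Dash, dcc, html, dash_table

app = Dash(__name__)

app.layout = html.Div(
    [
        # fires the update callback periodically (interval value is a placeholder)
        dcc.Interval(id="timer", interval=1000),
        # connection status written by another callback
        dcc.Store(id="status"),
        html.Button("Start", id="start-button"),
        html.Div(id="last-update"),
        dash_table.DataTable(id="table1"),
        dash_table.DataTable(id="table2"),
    ]
)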
Best regards.
Edit: my table-update callback is as follows:
import ast
import time

import pandas as pd
import polars as pl
from dash import Input, Output, callback, exceptions


@callback(
    [
        Output("table1", "data"),
        Output("table2", "data"),
        Output("last-update", "children"),
    ],
    [
        Input("timer", "n_intervals"),
        Input("status", "data"),
        Input("start-button", "n_clicks"),
    ],
    prevent_initial_call=True,
)
def update_tables(n, status, but):
    global consumer  # Pulsar consumer created elsewhere (when the user presses start)
    if (
        but is not None
        and but > 0
        and status is not None
        and status[0] == "Bağlantı VAR"  # Turkish: "connection is UP"
        and consumer is not None
    ):
        msg = consumer.batch_receive()
        msg = msg[-1]
        # If the message was published more than 5 seconds ago, acknowledge it
        # and keep receiving until we get a fresh one.
        while msg.publish_timestamp() < (time.time() * 1000) - 5000:
            consumer.acknowledge_cumulative(msg)
            msg = consumer.batch_receive()
            msg = msg[-1]
        data = msg.data().decode("utf-8")
        data = ast.literal_eval(data)
        # data = json.loads(data).values()
        ob = pd.DataFrame(data)
        ob = pl.from_dataframe(ob)
        # ... a bunch of processing on the dataframe ...
        tsstr = ob["Time"].max().strftime("%H:%M:%S")
        tsstr = f"Last update: {tsstr}"
        global table1data
        global table2data
        global table1summary
        global table2summary
        table1data = ob.filter(pl.col("SomeColumn") == "A")
        table2data = ob.filter(pl.col("SomeColumn") == "B")
        table1summary = table1data.group_by("SomeGroupColumn").agg(
            [
                # aggregations
            ]
        )
        table2summary = table2data.group_by("SomeGroupColumn").agg(
            [
                # aggregations
            ]
        )
        return [
            table1summary.to_dicts(),
            table2summary.to_dicts(),
            tsstr,
        ]
    else:
        raise exceptions.PreventUpdate
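For reference, the global consumer is created elsewhere, roughly along these lines (a minimal sketch; the service URL, topic, and subscription name are placeholders, not my real values):

import pulsar

client = pulsar.Client("pulsar://localhost:6650")  # placeholder service URL
consumer = client.subscribe("my-topic", subscription_name="dash-table-sub")  # placeholder names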