Hello! First timer here, Im having an issue that I think is a result of one file writing data, and dash reading data at the same time from the same file.
One file, gather_data.py, is perpetually asking for data from a sensor, averaging out, and outputting an average that it then collects and publishes to the power.db file. I’m trying to make a dash site that takes that constantly growing database file, and creates a live graph. In practice however, if I run the dash file first, I have a static website, and if I run the data gatherer first, dash freaks out with this error:
Callback failed: the server did not respond.
(This error originated from the built-in JavaScript code that runs Dash apps. Click to see the full stack trace or open your browser’s console.)
Error: Callback failed: the server did not respond.
at handleError (http://127.0.0.1:8050/_dash-component-suites/dash/dash-renderer/build/dash_renderer.v3_0_4m1750843899.dev.js:75718:14)
I know that they are talking to each other, and independently connecting to the power.db file, because if I run the data gathering file first, let it finish after a certain amount of time, then run the dash file, I get a static graph. The issues arise when I try to run them at the same time.
Here’s the data gathering file, that calls the function avg_over2hsec from another file that communicates with the sensor to output an average.
import sqlite3
print(sqlite3.sqlite_version)
from test_power import avg_over_2hsec
import time
conn = sqlite3.connect('power.db')
print("Opened database successfully")
conn.execute('DROP TABLE IF EXISTS POWER;')
conn.execute('''
CREATE TABLE IF NOT EXISTS POWER (
TIME INT PRIMARY KEY,
APW1 INT NOT NULL,
APW2 INT NOT NULL,
APW3 INT NOT NULL,
APWTOT INT NOT NULL
);
''')
conn.commit()
print("Table created successfully")
n = 0
for n in range (0,3):
time_stamp = int(time.time())
appow1 = round(250 * avg_over_2hsec(25))
print(f"[{n}]Active Power 1 Done")
print("Active Power 1 Done")
appow2 = round(250 * avg_over_2hsec(27))
print(f"[{n}]Active Power 2 Done")
print("Active Power 2 Done")
appow3 = round(250 * avg_over_2hsec(29))
print(f"[{n}]Active Power 3 Done")
print("Active Power 3 Done")
appowtot = round(250 * avg_over_2hsec(43))
print(f"[{n}]Active Power Total Done")
print("Active Power Total Done")
conn.execute("INSERT INTO POWER (TIME, APW1, APW2, APW3, APWTOT) \
VALUES (?, ?, ?, ?, ?)", (time_stamp, appow1, appow2, appow3, appowtot)
);
conn.commit()
n = n + 1
Here’s the dash code:
import datetime
import dash
from dash import Dash, dcc, html, Input, Output, callback
from plotly.subplots import make_subplots
import sqlite3
import pandas as pd
app = Dash()
app.layout = html.Div(
html.Div([
html.H4('Title'),
html.Div(id='live-update-text'),
dcc.Graph(id='live-update-graph'),
dcc.Interval(
id='interval-component',
interval=10*1000, # in milliseconds
n_intervals=0
)
])
)
@app.callback(Output('live-update-graph', 'figure'),
Input('interval-component', 'n_intervals'))
def update_graph_live(n):
try:
conn = sqlite3.connect('power.db', check_same_thread=False)
df = pd.read_sql_query('SELECT * FROM POWER ORDER BY TIME DESC LIMIT 180', conn)
conn.close()
df = df.sort_values(by='TIME')
#create the figure
fig = make_subplots(rows=2, cols=1, vertical_spacing=0.2)
fig.update_layout(
margin={'l': 30, 'r': 10, 'b': 30, 't': 10},
legend={'x': 0, 'y': 1, 'xanchor': 'left'}
)
# plot various powers over time
fig.add_trace({
'x': df['TIME'],
'y': df['APW1'],
'name': 'APW1',
'mode': 'lines+markers',
'type': 'scatter'
}, row=2, col=1)
fig.add_trace({
'x': df['TIME'],
'y': df['APW2'],
'name': 'APW2',
'mode': 'lines+markers',
'type': 'scatter'
}, row=2, col=1)
fig.add_trace({
'x': df['TIME'],
'y': df['APW3'],
'name': 'APW3',
'mode': 'lines+markers',
'type': 'scatter'
}, row=2, col=1)
fig.add_trace({
'x': df['TIME'],
'y': df['APWTOT'],
'name': 'Total Power',
'mode': 'lines+markers',
'type': 'scatter'
}, row=2, col=1)
except Exception as e:
print(f"[ERROR] Could not fetch data: {e}")
return fig
if __name__ == '__main__':
app.run(debug=True)
Any direction as to what I should do to make them not compete for the databases attention at the same time would be appreciated! Or, if that’s not the issue, advice in general : )