How do I share the xaxis between fig1a and fig1, and xaxis2 between fig2a and fig2?
I want to be able to zoom on either the fig1 or fig1a graph and have the xaxis in the other graph update automatically.
I want fig1 to be tied to fig1a and fig2 to be tied to fig2a.
I tried:
fig1a.update_xaxes(range=fig1['layout']['xaxis']['range'])
fig2a.update_xaxes(range=fig2['layout']['xaxis']['range'])
Or do I really need subplots?
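Something like the sketch below (placeholder data, not my real queries) is what I think I'm after if subplots are the answer: one figure built with make_subplots(shared_xaxes=True), or equivalently linking the axes with matches='x', so that zooming either panel re-ranges both. Is that the recommended route, or can two separate st.plotly_chart figures be linked somehow?

# Minimal sketch with placeholder data: two stacked panels in one figure
# that share the x-axis, so zooming either panel updates both.
import pandas as pd
import plotly.graph_objects as go
import streamlit as st
from plotly.subplots import make_subplots

dates = pd.date_range("2023-01-01", periods=250, freq="D")
price = pd.Series(range(250), index=dates)                      # placeholder price series
sharpe = pd.Series([x / 250 for x in range(250)], index=dates)  # placeholder metric series

fig = make_subplots(rows=2, cols=1, shared_xaxes=True, vertical_spacing=0.05)
fig.add_trace(go.Scatter(x=dates, y=price, name="price"), row=1, col=1)
fig.add_trace(go.Scatter(x=dates, y=sharpe, name="annl_sharpe"), row=2, col=1)

# Alternative inside a single figure: link the x-axes explicitly
# fig.update_xaxes(matches='x')

fig.update_layout(hovermode="x unified", height=600)
st.plotly_chart(fig, use_container_width=True)

My guess is that if the four figures stay as separate st.plotly_chart calls, a one-off range=... copy at build time can't follow interactive zooms, since the zoom event never comes back to Python.

My full app is below: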
import streamlit as st
import pandas as pd
import psycopg2
import toml
import plotly.graph_objects as go
# Read PostgreSQL connection details from secrets.toml
secrets = toml.load(".streamlit/secrets.toml")
postgres_config = secrets.get("postgresql", {}) # Provide an empty dictionary as default if the key is missing
# Apply custom CSS to make sidebar text smaller
st.markdown("""
<style>
.sidebar .sidebar-content {
font-size: 8px;
}
</style>
""", unsafe_allow_html=True)
# Establish a connection to your PostgreSQL database
try:
conn = psycopg2.connect(
host=secrets['connections']['postgresql']['host'],
database=secrets['connections']['postgresql']['database'],
user=secrets['connections']['postgresql']['username'],
password=secrets['connections']['postgresql']['password']
)
cursor = conn.cursor()
col1, col2 = st.columns(2)
# Price columns available for plotting
datas = ['adj_close_price','close_price','high_price','low_price','volume']
# Get user input for which price column to plot
data = st.sidebar.selectbox('data', datas) if datas else None
# type of moving average ma = simple moving average , ema = exponential moving average
ma_types = ['ma','ema']
ma_type = st.sidebar.selectbox(f'one-sided moving average type :\n\nma is simple, ema is exponential',
ma_types) if ma_types else None
# Execute the SQL query to fetch tickers where priority_interest is 1
ticker_query = "SELECT ticker FROM security WHERE priority_interest=1 ORDER BY ticker;"
# Get user input for ticker from the fetched tickers
tickers1 = pd.read_sql_query(ticker_query,
conn)['ticker'].tolist()
default_ticker1 = 'SPY' # Set your desired default ticker here
ticker1 = st.sidebar.selectbox("Left ticker:",
tickers1,
index=tickers1.index(default_ticker1) if default_ticker1 in tickers1 else 0,
key="ticker1") if tickers1 else 'SPY'
# Get user input for ticker from the fetched tickers
tickers2 = pd.read_sql_query(ticker_query, conn)['ticker'].tolist()
ticker2 = st.sidebar.selectbox("Right ticker:",
tickers2,
key="ticker2") if tickers2 else None
# Execute the SQL query to get the last date using rank
last_date_query = """
SELECT CAST(price_date AS DATE)
FROM (
SELECT price_date, RANK() OVER (
PARTITION BY ticker
ORDER BY price_date DESC
) AS r
FROM daily_price
JOIN security ON security.id = daily_price.ticker_id
WHERE ticker = '{ticker}'
) AS ranked_prices
WHERE r = 1;
""".format(ticker=ticker1)
# SQL query to get the security name for the first ticker
name_query = """
SELECT name
FROM security
WHERE ticker = '{ticker}';
""".format(ticker=ticker1)
# Execute the name SQL query and fetch the result
cursor.execute(name_query)
security_name = cursor.fetchone()[0]  # Fetch the first column of the first row (the security name)
# Display the security name above fig1
col1.text(f'{security_name}')
# SQL query to get the name for the second ticker
name_query2 = """
SELECT name
FROM security
WHERE ticker = '{ticker}';
""".format(ticker=ticker2)
# Execute the name SQL query and fetch the result for the second ticker
cursor.execute(name_query2)
security_name2 = cursor.fetchone()[0] # Fetch the name from the result
# Execute the last date SQL query and fetch the result
cursor.execute(last_date_query)
last_date = cursor.fetchone()[0] # Fetch the first column of the first row (the last date)
# Get user input for custom SQL query in the sidebar
# custom_sql1 = st.sidebar.text_area("Resulting QUERY :",
# "SELECT cast(price_date as DATE), {data} FROM daily_price JOIN security ON security.id = daily_price.ticker_id WHERE ticker = '{ticker}' ORDER BY price_date asc;")
custom_sql1 = st.sidebar.text_area("Resulting QUERY :",
'''SELECT COALESCE(cast(dp.price_date as DATE),cast(pe.date as DATE)) as price_date,
COALESCE(dp.ticker_id, pe.ticker_id) as ticker_id,
dp.{data},
pe._50{ma_type},
pe._200{ma_type2}
FROM
daily_price dp
LEFT JOIN
performance_individual_entity pe ON dp.ticker_id = pe.ticker_id
AND cast(dp.price_date as DATE) = cast(pe.date as DATE)
JOIN
security s ON s.id = COALESCE(dp.ticker_id, pe.ticker_id)
WHERE
s.ticker = '{ticker}'
ORDER BY
price_date asc;''')
# Fill in the {data}, {ma_type}, {ma_type2}, and {ticker} placeholders with the user's selections
formatted_sql_query1 = custom_sql1.format(data=data,ma_type=ma_type,ma_type2=ma_type,ticker=ticker1)
# Execute the formatted SQL query and fetch results into a DataFrame
df1 = pd.read_sql_query(formatted_sql_query1, conn, parse_dates=['price_date'], index_col='price_date')
custom_sql1a = '''SELECT perf.date as date,
perf.annl_sharpe,
perf.annl_geom_mean_ret,
perf.daily_skw
FROM
performance_individual_entity perf
JOIN
security s ON s.id = perf.ticker_id
WHERE
s.ticker = '{ticker}'
ORDER BY
perf.date asc;'''
# Replace {ticker} placeholder with the user's input in the SQL query
formatted_sql_query1a = custom_sql1a.format(ticker=ticker1)
# Execute the formatted SQL query and fetch results into a DataFrame
df1a = pd.read_sql_query(formatted_sql_query1a, conn, parse_dates=['date'], index_col='date')
custom_sql2a = '''SELECT perf.date as date,
perf.annl_sharpe,
perf.annl_geom_mean_ret,
perf.daily_skw
FROM
performance_individual_entity perf
JOIN
security s ON s.id = perf.ticker_id
WHERE
s.ticker = '{ticker}'
ORDER BY
perf.date asc;'''
# Replace {ticker} placeholder with the user's input in the SQL query
formatted_sql_query2a = custom_sql2a.format(ticker=ticker2)
# Execute the formatted SQL query and fetch results into a DataFrame
df2a = pd.read_sql_query(formatted_sql_query2a, conn, parse_dates=['date'], index_col='date')
# Fill in the placeholders again, this time with the second ticker
formatted_sql_query2 = custom_sql1.format(data=data,ma_type=ma_type,ma_type2=ma_type,ticker=ticker2)
# Execute the formatted SQL query and fetch results into a DataFrame
df2 = pd.read_sql_query(formatted_sql_query2, conn, parse_dates=['price_date'], index_col='price_date')
# # Main container for the graph
# st.markdown(f'<h3>{ticker1} : {data} : latest-{last_date}</h3>', unsafe_allow_html=True)
# Add range sliders to the layout
range_slider_layout = dict(
xaxis=dict(
rangeselector=dict(
buttons=list([
dict(count=5,
label='1w',
step='day',
stepmode='backward'),
dict(count=1,
label='1m',
step='month',
stepmode='backward'),
dict(count=6,
label='6m',
step='month',
stepmode='backward'),
dict(count=9,
label='9m',
step='month',
stepmode='backward'),
dict(count=1,
label='YTD',
step='year',
stepmode='todate'),
dict(count=1,
label='1y',
step='year',
stepmode='backward'),
dict(count=5,
label='5y',
step='year',
stepmode='backward'),
dict(count=10,
label='10y',
step='year',
stepmode='backward'),
dict(step='all')
])
),
rangeslider=dict(
visible=True
),
type='date'
)
)
# Format the x-axis dates as YYYY-MM-DD
plt_df1 = df1.copy()
#plt_df1.index = plt_df1.index.strftime('%d %b \'%y')
plt_df1.index = plt_df1.index.strftime('%Y-%m-%d')
# Create a Plotly figure
fig1 = go.Figure()
# Add the trace to the figure
fig1.add_trace(go.Scatter(x=plt_df1.index, y=plt_df1[data], mode='lines'))
fig1.add_trace(go.Scatter(x=plt_df1.index, y=plt_df1[f'_50{ma_type}'], mode='lines', line=dict(color='red')))
fig1.add_trace(go.Scatter(x=plt_df1.index, y=plt_df1[f'_200{ma_type}'], mode='lines', line=dict(color='blue')))
# Enable spikes on x-axis and y-axis for crosshairs effect and set spike color to gray
fig1.update_xaxes(showspikes=True, spikemode='across', spikesnap='cursor', spikecolor='gray')
fig1.update_yaxes(showspikes=True, spikemode='across', spikesnap='cursor', spikecolor='gray')
# Treat the x-axis as categories and rotate tick labels by 45 degrees
fig1.update_xaxes(type='category', tickangle=45)
fig1.update_layout(hovermode='x unified', yaxis=dict(scaleanchor="x", scaleratio=1))
fig1.update_layout(title=f'{ticker1} : {data} : latest-{last_date}')
fig1.update_layout(showlegend=False)
# Update layout for fig1
fig1.update_layout(range_slider_layout)
#fig1.update_layout(width=2000)
# # Display the Plotly figure
# st.plotly_chart(fig1)
# # Main container for the graph
# st.markdown(f'<h3>{ticker2} : {data} : latest-{last_date}</h3>', unsafe_allow_html=True)
# Format the x-axis dates as YYYY-MM-DD
plt_df2 = df2.copy()
plt_df2.index = plt_df2.index.strftime('%Y-%m-%d')
# Create a Plotly figure
fig2 = go.Figure()
# Add the trace to the figure
fig2.add_trace(go.Scatter(x=plt_df2.index, y=plt_df2[data], mode='lines', name='Price'))
fig2.add_trace(go.Scatter(x=plt_df2.index, y=plt_df2[f'_50{ma_type}'], mode='lines', line=dict(color='red'), name=f'50{ma_type}'))
fig2.add_trace(go.Scatter(x=plt_df2.index, y=plt_df2[f'_200{ma_type}'], mode='lines', line=dict(color='blue'), name=f'200{ma_type}'))
# Enable spikes on x-axis and y-axis for crosshairs effect and set spike color to gray
fig2.update_xaxes(showspikes=True, spikemode='across', spikesnap='cursor', spikecolor='gray')
fig2.update_yaxes(showspikes=True, spikemode='across', spikesnap='cursor', spikecolor='gray')
# Treat the x-axis as categories and rotate tick labels by 45 degrees
fig2.update_xaxes(type='category', tickangle=45)
fig2.update_layout(hovermode='x unified', hoverdistance=0, yaxis=dict(scaleanchor="x", scaleratio=1))
fig2.update_layout(title=f'{ticker2} : {data} : latest-{last_date}')
fig2.update_layout(showlegend=True)
# Update layout for fig2
fig2.update_layout(range_slider_layout)
# Format the x-axis dates as YYYY-MM-DD
plt_df1a = df1a.copy()
plt_df1a.index = plt_df1a.index.strftime('%Y-%m-%d')
# Create a Plotly figure for df1a
fig1a = go.Figure()
fig1a.add_trace(go.Scatter(x=plt_df1a.index, y=plt_df1a['annl_sharpe'], mode='lines', name='Annl SR'))
fig1a.add_trace(go.Scatter(x=plt_df1a.index, y=plt_df1a['annl_geom_mean_ret'], mode='lines', name='Annl GeoMean Ret'))
fig1a.add_trace(go.Scatter(x=plt_df1a.index, y=plt_df1a['daily_skw'], mode='lines', name='Daily Skew'))
# Enable spikes on x-axis and y-axis for crosshairs effect and set spike color to gray
fig1a.update_xaxes(showspikes=True, spikemode='across', spikesnap='cursor', spikecolor='gray')
fig1a.update_yaxes(showspikes=True, spikemode='across', spikesnap='cursor', spikecolor='gray')
# Customize the layout for fig1a
fig1a.update_layout(title=f'{ticker1} - Performance Metrics',
yaxis_title='Values')
# Treat the x-axis as categories and rotate tick labels by 45 degrees
fig1a.update_xaxes(type='category', tickangle=45)
fig1a.update_layout(hovermode='x unified', hoverdistance=0, yaxis=dict(scaleanchor="x", scaleratio=1))
fig1a.update_layout(title=f'{ticker1} : {data} : latest-{last_date}')
# Update layout for fig1a
fig1a.update_layout(range_slider_layout)
# Ensure synchronized x-axes with fig1
fig1a.update_xaxes(range=fig1['layout']['xaxis']['range'])
# legend
fig1a.update_layout(showlegend=False)
fig1a.update_layout(legend=dict(orientation="h", yanchor="bottom", y=0, xanchor="right", x=1))
# Display the security name for the second ticker above fig2
col2.text(security_name2)
# Create a Plotly figure for df2a
fig2a = go.Figure()
fig2a.add_trace(go.Scatter(x=df2a.index, y=df2a['annl_sharpe'], mode='lines', name='Annl SR'))
fig2a.add_trace(go.Scatter(x=df2a.index, y=df2a['annl_geom_mean_ret'], mode='lines', name='Annl Ret'))
fig2a.add_trace(go.Scatter(x=df2a.index, y=df2a['daily_skw'], mode='lines', name='Daily Skew'))
# Customize the layout for fig2a
fig2a.update_layout(title=f'{ticker2} - Performance Metrics',
yaxis_title='Values')
# Treat the x-axis as categories and rotate tick labels by 45 degrees
fig2a.update_xaxes(type='category', tickangle=45)
fig2a.update_layout(hovermode='x unified', hoverdistance=0, yaxis=dict(scaleanchor="x", scaleratio=1))
fig2a.update_layout(title=f'{ticker2} : {data} : latest-{last_date}')
# Update layout for fig2a
fig2a.update_layout(range_slider_layout)
# legend
fig2a.update_layout(showlegend=True)
#fig2a.update_layout(legend=dict(orientation="h", yanchor="bottom", y=0, xanchor="right", x=1))
col1.plotly_chart(fig1, theme="streamlit", use_container_width=True)
col2.plotly_chart(fig2, theme="streamlit", use_container_width=True)
# Display the additional charts below the main charts
col1.plotly_chart(fig1a, use_container_width=True)
col2.plotly_chart(fig2a, use_container_width=True)
# Display the dataset with a checkbox to toggle visibility
show_pricing_dataset = st.sidebar.checkbox("Show Pricing Datasets", True)
if show_pricing_dataset:
# Pricing query for ticker1
query1 = '''SELECT cast(price_date as DATE),
open_price as open,
high_price as high,
low_price as low,
adj_close_price as adj,
close_price as close,
dividends as div
FROM daily_price
JOIN security ON security.id = daily_price.ticker_id
WHERE ticker = '{ticker}'
and daily_price.price_date < CURRENT_DATE+1
ORDER BY price_date desc;'''.format(ticker=ticker1)
# Pricing query for ticker2
query2 = '''SELECT cast(price_date as DATE),
open_price as open,
high_price as high,
low_price as low,
adj_close_price as adj,
close_price as close,
dividends as div
FROM daily_price
JOIN security ON security.id = daily_price.ticker_id
WHERE ticker = '{ticker}'
and daily_price.price_date < CURRENT_DATE+1
ORDER BY price_date desc;'''.format(ticker=ticker2)
# query1 and query2 are already formatted with the chosen tickers above
formatted_sql_query1 = query1
formatted_sql_query2 = query2
# Execute the formatted SQL query and fetch results into a DataFrame
df1 = pd.read_sql_query(formatted_sql_query1,
conn,
parse_dates=['price_date'],
index_col='price_date')
# Execute the formatted SQL query and fetch results into a DataFrame
df2 = pd.read_sql_query(formatted_sql_query2, conn, parse_dates=['price_date'], index_col='price_date')
# Round the price columns to 2 decimals for display
for prices in ['adj', 'close','high','low','open']:
df1[prices] = df1[prices].round(2)
df2[prices] = df2[prices].round(2)
# Set display options
pd.set_option('display.float_format', '{:.2f}'.format)
# Separator column between the two ticker dataframes
df1[' '] = ''
# Replace 0 values with empty strings
df1['div'] = df1['div'].replace(0, "")
df2['div'] = df2['div'].replace(0, "")
# Merge the two dataframes on 'price_date'
merged_df = pd.merge(df1,
df2,
how='outer',
left_index=True,
right_index=True,
suffixes=('_'+ticker1, '_'+ticker2))
#merged_df = pd.merge(df1, df2, how='outer')
# Sort the merged dataframe by date, latest first
merged_df = merged_df.sort_index(ascending=False)
#df1 = df1.sort_index(ascending=False)
# Format the date index as 'YYYY-Mon-DD'
merged_df.index = merged_df.index.strftime('%Y-%b-%d')
st.dataframe(merged_df)
# Display the dataset with a checkbox to toggle visibility
show_perf_dataset = st.sidebar.checkbox("Show Performance Datasets", True)
if show_perf_dataset:
# Performance query for ticker1
query1 = '''SELECT date,_50ma, volume, obv, atr, wealth_index,
previous_peak, drawdown, daily_ret, daily_arith_mean_ret,
daily_std_ret, daily_skw, daily_krt, daily_rsi,
annl_geom_mean_ret, annl_arith_mean_ret, annl_stdev_ret,
annl_sharpe, wkly_geom_mean_ret, wkly_stdev_ret, mnthly_geom_mean_ret
FROM performance_individual_entity
JOIN security ON security.id = performance_individual_entity.ticker_id
WHERE ticker = '{ticker}'
ORDER BY date desc limit 5;'''.format(ticker=ticker1)
# Performance query for ticker2
query2 = '''SELECT date, _50ma
FROM performance_individual_entity
JOIN security ON security.id = performance_individual_entity.ticker_id
WHERE ticker = '{ticker}'
ORDER BY date desc limit 5;'''.format(ticker=ticker2)
# query1 and query2 are already formatted with the chosen tickers above
formatted_sql_query1 = query1
formatted_sql_query2 = query2
# Execute the formatted SQL query and fetch results into a DataFrame
df1 = pd.read_sql_query(formatted_sql_query1,
conn, parse_dates=['date'], index_col='date')
# Execute the formatted SQL query and fetch results into a DataFrame
#df2 = pd.read_sql_query(formatted_sql_query2, conn, parse_dates=['date'], index_col='date')
for column in ['_50ma', 'atr', 'drawdown']:
df1[column] = df1[column].round(2)
st.dataframe(df1.T)
except psycopg2.Error as e:
    st.error(f"Error connecting to the PostgreSQL database: {e}")
finally:
    # Only close the connection if it was successfully opened
    if 'conn' in locals():
        conn.close()