diff --git a/Dashboards/assets/styles.css b/Dashboards/assets/styles.css
index ffec91e..9cb6be9 100644
--- a/Dashboards/assets/styles.css
+++ b/Dashboards/assets/styles.css
@@ -1,25 +1,10 @@
-/* Set default text styling */
-body {
- color: #495057;
- /* color: #c35a25; */
- font-family: "Source Sans Pro", sans-serif;
- text-align: left;
- letter-spacing: normal;
-}
-
.graph-title {
text-align: left;
margin: 0;
+ padding: 5px;
}
.info-icon {
font-size: 15px;
margin-left: 5px;
}
-
-
-/* Text color of the selected value in the dropdown */
-.Select-value-label {
- color: #495057!important;
- /* color: #c35a25!important; */
-}
diff --git a/Dashboards/charts.py b/Dashboards/charts.py
index 2a29c98..8333e68 100644
--- a/Dashboards/charts.py
+++ b/Dashboards/charts.py
@@ -1,6 +1,35 @@
import plotly.express as px
import plotly.graph_objects as go
-from constants import BASE_COLOR_PALETTE, PASTEL_COLOR_PALETTE, PLOTLY_LAYOUT
+from Dashboards.constants import BASE_COLOR_PALETTE, PASTEL_COLOR_PALETTE, PLOTLY_LAYOUT, MAP_SETTINGS
+
+
+def create_map(filtered_df, selected_map_column, zoom, center):
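+    """
+    Create a scatter map of the observations, colored by `selected_map_column`
+    using the color mapping defined in MAP_SETTINGS.
+    """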
+ fig_map = px.scatter_mapbox(
+ filtered_df,
+ lat="localisation_lat",
+ lon="localisation_long",
+ color=selected_map_column,
+ hover_name="date_obs",
+ hover_data=["localisation_lat", "localisation_long"],
+ mapbox_style="open-street-map",
+ color_discrete_map=MAP_SETTINGS[selected_map_column]["color_map"],
+ )
+
+ fig_map.update_layout(
+ PLOTLY_LAYOUT,
+ margin=dict(l=0, r=0, t=0, b=0),
+ mapbox_zoom=zoom,
+ mapbox_center=center,
+ legend=dict(
+ x=0.02, # Position the legend on the map
+ y=0.02,
+ bgcolor="rgba(255, 255, 255, 0.7)", # Semi-transparent background
+ bordercolor="grey",
+ borderwidth=1.5,
+ ),
+ )
+
+ return fig_map
def create_hist1_nb_species(observation_with_vdl_df, nb_species_clicked):
hist1 = px.histogram(
@@ -17,6 +46,15 @@ def create_hist1_nb_species(observation_with_vdl_df, nb_species_clicked):
bargap=0.1,
)
+ # Update hover template
+ hist1.update_traces(
+ hovertemplate=(
+            "Nombre d'espèces: %{x}<br>"
+            "Nombre de sites: %{y}<br>"
+            "<extra></extra>"
+ )
+ )
+
# Add vertical line for the clicked number of species
if nb_species_clicked:
hist1.add_shape(
@@ -47,6 +85,15 @@ def create_hist2_vdl(observation_with_vdl_df, vdl_clicked):
bargap=0.1,
)
+ # Update hover template
+ hist2.update_traces(
+ hovertemplate=(
+            "VDL: %{x}<br>"
+            "Nombre de sites: %{y}<br>"
+            "<extra></extra>"
+ )
+ )
+
# Add vertical line for the clicked VDL value
if vdl_clicked:
hist2.add_shape(
@@ -97,7 +144,6 @@ def create_hist3(lichen_frequency_df):
## Gauge charts
-
def create_gauge_chart(value, title=None):
fig = go.Figure(
go.Indicator(
@@ -125,7 +171,43 @@ def create_gauge_chart(value, title=None):
)
fig.update_layout(
- margin={'l': 30, 'r': 30, 'b': 0, 't': 0}
+ PLOTLY_LAYOUT,
+ margin=dict(l=0, r=0, t=20, b=10),
+ )
+
+ return fig
+
+
+def find_interval(intervals, value):
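+    """
+    Return the index of the interval (defined by consecutive bounds in `intervals`)
+    that contains value, or None if value is below the first bound.
+    """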
+ for i in range(len(intervals) - 1):
+ if intervals[i] <= value < intervals[i + 1]:
+ return i
+ if value >= intervals[-1]:
+        return len(intervals) - 2  # clamp to the last interval (avoids an off-by-one index into color_scale)
+ return None
+
+def create_kpi(value, title=None, intervals=None, color_scale=None):
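+    """
+    Create a numeric KPI indicator (percentage) whose color depends on which
+    interval of `intervals` the value falls into (one color per interval in `color_scale`).
+    """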
+
+ if intervals is None:
+ intervals = [0, 25, 50, 75, 100.5]
+ if color_scale is None:
+ color_scale = ['green', 'yellow', 'orange', 'red']
+
+ color_idx = find_interval(intervals, value)
+ color = color_scale[color_idx]
+
+ indicator = go.Indicator(
+ value=value,
+ number={"suffix": "%", "font": {"color": color, "size": 50}},
+ mode="number",
+ title={"text": title},
+ )
+
+ fig = go.Figure(indicator)
+
+ fig.update_layout(
+ PLOTLY_LAYOUT,
+ margin=dict(l=0, r=0, t=0, b=0),
)
return fig
diff --git a/Dashboards/constants.py b/Dashboards/constants.py
index d19cacb..c6f3c5c 100644
--- a/Dashboards/constants.py
+++ b/Dashboards/constants.py
@@ -1,22 +1,53 @@
-from utils.css_reader import get_css_properties
+BODY_FONT_FAMILY = '"Source Sans Pro", sans-serif'
+PLOTLY_FONT_COLOR = "#495057" # Grey
# Constants for color palettes, font families, etc.
+# BASE_COLOR_PALETTE = [
+# "#387CA6",
+# "#1C6C8C",
+# "#3887A6",
+# "#ADCCD9",
+# "#F2F2F2"
+# ]
+
+
+# Generated with https://omatsuri.app/color-shades-generator
+
BASE_COLOR_PALETTE = [
+ # "#333D43",
+ # "#37444C",
+ # "#3A4C58",
+ # "#3C5665",
+ # "#3D6176",
+ # "#3C6D8C",
"#387CA6",
- "#1C6C8C",
- "#3887A6",
- "#ADCCD9",
- "#F2F2F2"
+ "#4A86AB",
+ "#608FAD",
+ "#799AAF",
+ "#90A7B5",
+ "#A6B6BF",
+ "#BDC6CC",
]
PASTEL_COLOR_PALETTE = [
- '#c3d7e4',
- '#bad2dc',
- '#c3dbe4',
- '#e6eff3',
- '#fbfbfb'
+ # "#c1c4c6",
+ # "#c3c6c9",
+ # "#c3c9cc",
+ # "#c4ccd0",
+ # "#c4cfd5",
+ # "#c4d3dc",
+ "#c3d7e4",
+ "#c8dae5",
+ "#cfdde6",
+ "#d6e0e7",
+ "#dde4e8",
+ "#e4e9eb",
+ "#ebedef",
]
+
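+# Names of the five square columns (sq1 ... sq5) in the table data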
+SQUARE_COLUMNS = [f'sq{i}' for i in range(1, 6)]
+
ORIENTATIONS_MAPPING = {
"N": "Nord",
"E": "Est",
@@ -26,11 +57,16 @@
ORIENTATIONS = list(ORIENTATIONS_MAPPING.keys())
-SQUARE_COLUMNS = ['sq1', 'sq2', 'sq3', 'sq4', 'sq5']
-
-BODY_STYLE = get_css_properties("body")
-BODY_FONT_FAMILY = BODY_STYLE.get("font-family", "Arial")
-BODY_FONT_COLOR = BODY_STYLE.get("color", "grey")
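+# Map settings: for each column that can be shown on the map, its display title and the color of each category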
+MAP_SETTINGS = {
+ "nb_species_cat": {
+ "title": "Nombre d'espèces",
+ "color_map": {'<7': 'red', '7-10': 'orange', '11-14': 'yellow', '>14': 'green'}
+ },
+ "VDL_cat": {
+ "title": "VDL",
+ "color_map": {'<5': 'red', '5-10': 'orange', '10-15': 'yellow', '>15': 'green'}
+ }
+}
# Define the plotly style for hover labels
PLOTLY_HOVER_STYLE = {
@@ -41,12 +77,11 @@
# Define the plotly layout for all plots
PLOTLY_LAYOUT = {
- "font": dict(
- family=BODY_FONT_FAMILY,
- color=BODY_FONT_COLOR
- ),
+ "font": dict(family=BODY_FONT_FAMILY, color=PLOTLY_FONT_COLOR),
"template": "plotly_white",
- "margin": dict(l=10, r=10, t=10, b=10),
- "barcornerradius":"30%",
- "hoverlabel": PLOTLY_HOVER_STYLE
+ "margin": dict(l=0, r=0, t=10, b=10),
+ "barcornerradius": "30%",
+ "hoverlabel": PLOTLY_HOVER_STYLE,
+ "plot_bgcolor": "rgba(0, 0, 0, 0)", # Transparent plot background
+ "paper_bgcolor": "rgba(0, 0, 0, 0)", # Transparent paper background
}
diff --git a/Dashboards/dashboard.py b/Dashboards/dashboard.py
index 47dc06a..2710dc7 100644
--- a/Dashboards/dashboard.py
+++ b/Dashboards/dashboard.py
@@ -1,15 +1,16 @@
-import plotly.express as px
import dash_mantine_components as dmc
import pandas as pd
from dash import Dash, _dash_renderer, html, dcc, Output, Input, callback
from dash.dependencies import State
+from dash.exceptions import PreventUpdate
from dash_iconify import DashIconify
from datetime import datetime
-from my_data.datasets import get_lichen_data, get_lichen_species_data, get_tree_data, get_observation_data, get_table_data, get_lichen_ecology
-from my_data.computed_datasets import merge_tables, vdl_value, count_lichen, count_lichen_per_species, count_species_per_observation, count_lichen_per_lichen_id, df_frequency
-from charts import create_hist1_nb_species, create_hist2_vdl, create_hist3, create_hist4, create_gauge_chart
+from Dashboards.my_data.datasets import get_useful_data
+from Dashboards.my_data.computed_datasets import merge_tables, vdl_value, count_lichen, count_lichen_per_species, count_species_per_observation, count_lichen_per_lichen_id, df_frequency
+from Dashboards.charts import create_map, create_hist1_nb_species, create_hist2_vdl, create_hist3, create_hist4, create_gauge_chart, create_kpi
+from Dashboards.constants import MAP_SETTINGS, BASE_COLOR_PALETTE, BODY_FONT_FAMILY
_dash_renderer._set_react_version("18.2.0")
# run with : python Dashboards/dashboard.py
@@ -27,17 +28,12 @@
# Get the datasets
# environment_df = get_environment_data()
print("Fetching data...")
-lichen_df = get_lichen_data()
-lichen_species_df = get_lichen_species_data()
-observation_df = get_observation_data()
-table_df = get_table_data()
-tree_df = get_tree_data()
-ecology_df = get_lichen_ecology()
-
-## For tab on observations
+lichen_df, lichen_species_df, observation_df, table_df, tree_df, ecology_df = get_useful_data()
+
+
+# For tab on observations
merged_table_df = merge_tables(table_df, lichen_df, lichen_species_df, observation_df)
merged_table_with_nb_lichen_df = count_lichen(merged_table_df)
-nb_lichen_per_lichen_id_df = count_lichen_per_lichen_id(merged_table_with_nb_lichen_df , lichen_df, lichen_species_df)
observation_with_species_count_df = count_species_per_observation(lichen_df, observation_df)
observation_with_vdl_df = vdl_value(observation_with_species_count_df, merged_table_with_nb_lichen_df)
@@ -50,40 +46,25 @@
# Compute the degree of artificialisation
def calc_deg_artif(observation_id: int):
- global_freq = grouped_df[grouped_df['id']== observation_id]['freq'].sum()
- base_freq = grouped_df[(grouped_df['id'] == observation_id) & (grouped_df['poleotolerance'] == 'resistant')]['freq'].sum()
+    global_freq = grouped_df[grouped_df['observation_id'] == observation_id]['freq'].sum()
+ base_freq = grouped_df[(grouped_df['observation_id'] == observation_id) & (grouped_df['poleotolerance'] == 'resistant')]['freq'].sum()
return round((base_freq / global_freq) * 100, 2)
# Compute the acid pollution
def calc_pollution_acide(observation_id: int):
- global_freq = grouped_df[grouped_df['id']== observation_id]['freq'].sum()
- acid_freq = grouped_df[(grouped_df['id'] == observation_id) & (grouped_df['ph'] == 'acidophilous')]['freq'].sum()
+    global_freq = grouped_df[grouped_df['observation_id'] == observation_id]['freq'].sum()
+ acid_freq = grouped_df[(grouped_df['observation_id'] == observation_id) & (grouped_df['ph'] == 'acidophilous')]['freq'].sum()
return round((acid_freq / global_freq) * 100, 2)
# Compute the nitrogen pollution
def calc_pollution_azote(observation_id: int):
- global_freq = grouped_df[grouped_df['id']== observation_id]['freq'].sum()
- azote_freq = grouped_df[(grouped_df['id'] == observation_id) & (grouped_df['eutrophication'] == 'eutrophic')]['freq'].sum()
+    global_freq = grouped_df[grouped_df['observation_id'] == observation_id]['freq'].sum()
+ azote_freq = grouped_df[(grouped_df['observation_id'] == observation_id) & (grouped_df['eutrophication'] == 'eutrophic')]['freq'].sum()
return round((azote_freq / global_freq) * 100, 2)
-## Map
-
-# Colors for the map
-color_dict_nb_species = {'<7': 'red', '7-10': 'orange', '11-14': 'yellow', '>14': 'green'} # number of species
-color_dict_vdl = {'<5': 'red', '5-10': 'orange', '10-15': 'yellow', '>15': 'green'} # VDL
-
-# Dictionnaire de couleurs à utiliser pour chaque variable
-map_color_palettes = {
- 'nb_species_cat': color_dict_nb_species,
- 'VDL_cat': color_dict_vdl,
-}
-
-# Liste des variables disponibles pour afficher sur la carte
-map_columns = list(map_color_palettes.keys())
-
# Callback to update the map and the histograms based on the selected dates
@callback(
@@ -95,20 +76,27 @@ def calc_pollution_azote(observation_id: int):
Output('vdl-hist2', 'figure'),
Output('hist3','figure'),
- Input('date-picker-range', 'start_date'),
- Input('date-picker-range', 'end_date'),
- Input('column-dropdown', 'value'),
+ Input('date-picker-range', 'value'),
+ Input('map-column-select', 'value'),
Input('species-map', 'clickData'),
    State('species-map', 'relayoutData') # Current zoom and position of the map
)
-def update_map(start_date, end_date, selected_column, clickData, relayoutData):
- start_date = pd.to_datetime(start_date).date()
- end_date = pd.to_datetime(end_date).date()
+def update_dashboard1(date_range, selected_map_column, clickData, relayoutData):
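+    """
+    Update the map, the gauge/KPI charts and the histograms of the "Sites" tab,
+    based on the selected date range, the selected map column and the point
+    clicked on the map.
+    """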
+    # Avoid updating when one of the dates is None (not selected)
+ if None in date_range:
+ raise PreventUpdate
+
+ start_date = pd.to_datetime(date_range[0]).date()
+ end_date = pd.to_datetime(date_range[1]).date()
- # Filtrer le dataframe pour correspondre aux dates sélectionnées
- filtered_df = observation_with_vdl_df[(observation_with_vdl_df['date_obs'] >= start_date) & (observation_with_vdl_df['date_obs'] <= end_date)]
+ # Filter the data based on the selected date range
+ filtered_observation_with_vdl_df = observation_with_vdl_df[(observation_with_vdl_df['date_obs'] >= start_date) & (observation_with_vdl_df['date_obs'] <= end_date)]
+ filtered_table_with_nb_lichen_df = merged_table_with_nb_lichen_df[(merged_table_with_nb_lichen_df['date_obs'] >= start_date) & (merged_table_with_nb_lichen_df['date_obs'] <= end_date)]
+
+ # Count lichen per lichen_id on filtered table
+ filtered_nb_lichen_per_lichen_id_df = count_lichen_per_lichen_id(filtered_table_with_nb_lichen_df, lichen_df, lichen_species_df)
    # If the current zoom and position are available, use them; otherwise set default values
if relayoutData and "mapbox.zoom" in relayoutData and "mapbox.center" in relayoutData:
@@ -116,62 +104,51 @@ def update_map(start_date, end_date, selected_column, clickData, relayoutData):
current_center = relayoutData["mapbox.center"]
else:
        current_zoom = 4.8 # Default zoom value
- current_center = {"lat": filtered_df['localisation_lat'].mean() + 0.5, "lon": filtered_df['localisation_long'].mean()}
-
+ current_center = {"lat": filtered_observation_with_vdl_df['localisation_lat'].mean() + 0.5, "lon": filtered_observation_with_vdl_df['localisation_long'].mean()}
    # Display the map
- fig_map = px.scatter_mapbox(filtered_df, lat='localisation_lat', lon='localisation_long',
- color=selected_column,
- hover_name='date_obs', hover_data=['localisation_lat', 'localisation_long'],
- mapbox_style="open-street-map",
- color_discrete_map=map_color_palettes[selected_column]
- )
-
- fig_map.update_layout(mapbox_zoom=current_zoom,
- mapbox_center=current_center,
- margin=dict(l=10, r=10, t=0, b=0),
- )
+ fig_map = create_map(filtered_observation_with_vdl_df, selected_map_column, current_zoom, current_center)
# Initialize variables
nb_species_clicked = None
vdl_clicked = None
observation_id_clicked = 503 # Default observation ID, to be improved
- # Initalise the filtered dataframe (unfiltered by default), and sum over all data
- filtered_nb_lichen_per_lichen_id_df = nb_lichen_per_lichen_id_df.groupby('species_id').agg({
- 'nb_lichen': 'sum',
- 'nb_lichen_N': 'sum',
- 'nb_lichen_S': 'sum',
- 'nb_lichen_O': 'sum',
- 'nb_lichen_E': 'sum',
- 'name': 'first'
- }).reset_index().rename(columns={'name': 'unique_name'}).sort_values(by='nb_lichen', ascending=True)
-
# If a point on the map is clicked, identify the observation ID, number of species and VDL
if clickData is not None:
lat_clicked = clickData['points'][0]['lat']
lon_clicked = clickData['points'][0]['lon']
- observation_clicked = filtered_df[(filtered_df['localisation_lat'] == lat_clicked) & (filtered_df['localisation_long'] == lon_clicked)]
+ observation_clicked = filtered_observation_with_vdl_df[(filtered_observation_with_vdl_df['localisation_lat'] == lat_clicked) & (filtered_observation_with_vdl_df['localisation_long'] == lon_clicked)]
if not observation_clicked.empty:
observation_clicked = observation_clicked.iloc[0] # Take the first element matching the latitude and longitude
- observation_id_clicked = observation_clicked['id']
+ observation_id_clicked = observation_clicked['observation_id']
nb_species_clicked = observation_clicked['nb_species']
vdl_clicked = observation_clicked['VDL']
- filtered_nb_lichen_per_lichen_id_df = nb_lichen_per_lichen_id_df[nb_lichen_per_lichen_id_df['observation_id'] == observation_id_clicked]
+ filtered_nb_lichen_per_lichen_id_df = filtered_nb_lichen_per_lichen_id_df[filtered_nb_lichen_per_lichen_id_df['observation_id'] == observation_id_clicked]
+ else:
+        # If no observation is clicked, show aggregated data over all observations
+ filtered_nb_lichen_per_lichen_id_df = filtered_nb_lichen_per_lichen_id_df.groupby('species_id').agg({
+ 'nb_lichen': 'sum',
+ 'nb_lichen_N': 'sum',
+ 'nb_lichen_S': 'sum',
+ 'nb_lichen_O': 'sum',
+ 'nb_lichen_E': 'sum',
+ 'name': 'first'
+ }).reset_index().rename(columns={'name': 'unique_name'}).sort_values(by='nb_lichen', ascending=True)
deg_artif = calc_deg_artif(observation_id_clicked)
pollution_acide = calc_pollution_acide(observation_id_clicked)
pollution_azote = calc_pollution_azote(observation_id_clicked)
- gauge_chart1 = create_gauge_chart(deg_artif)
+ gauge_chart1 = create_kpi(deg_artif)
gauge_chart2 = create_gauge_chart(pollution_acide)
gauge_chart3 = create_gauge_chart(pollution_azote)
- hist1_nb_species = create_hist1_nb_species(filtered_df, nb_species_clicked)
- hist2_vdl = create_hist2_vdl(filtered_df, vdl_clicked)
+ hist1_nb_species = create_hist1_nb_species(filtered_observation_with_vdl_df, nb_species_clicked)
+ hist2_vdl = create_hist2_vdl(filtered_observation_with_vdl_df, vdl_clicked)
hist3 = create_hist3(filtered_nb_lichen_per_lichen_id_df)
return fig_map, gauge_chart1, gauge_chart2, gauge_chart3, hist1_nb_species, hist2_vdl, hist3
@@ -185,6 +162,15 @@ def update_map(start_date, end_date, selected_column, clickData, relayoutData):
def update_hist4(user_selection_species_id):
return create_hist4(nb_lichen_per_species_df, user_selection_species_id)
+
+## Initialize all the graphs (not strictly necessary, but improves the initial loading time)
+
+date_range = [observation_with_vdl_df["date_obs"].min(), datetime.now().date()]
+selected_map_column = list(MAP_SETTINGS.keys())[0]
+clickData = None
+relayoutData = None
+fig_map, gauge_chart1, gauge_chart2, gauge_chart3, hist1_nb_species, hist2_vdl, hist3 = update_dashboard1(date_range, selected_map_column, clickData, relayoutData)
+
# Create options for the user species dropdown
user_species_options = [
{"label": row["name"], "value": row["species_id"]}
@@ -198,20 +184,22 @@ def update_hist4(user_selection_species_id):
sites_layout = [
# Divider for the date picker
html.Div(
- [
- dcc.DatePickerRange(
+ style={"padding": "10px"},
+ children=[
+ # Widget for the date filter
+ dmc.DatePicker(
id="date-picker-range",
- min_date_allowed=observation_with_vdl_df["date_obs"].min(),
- max_date_allowed=datetime.now().date(),
- start_date=observation_with_vdl_df["date_obs"].min(),
- end_date=datetime.now().date(),
- initial_visible_month=datetime.now().date(),
- display_format="DD/MM/YYYY",
- clearable=False,
- updatemode="bothdates", # Only update callback when both dates are selected
- first_day_of_week=2, # Monday
+ minDate=observation_with_vdl_df["date_obs"].min(),
+ maxDate=datetime.now().date(),
+ type="range",
+ value=[
+ observation_with_vdl_df["date_obs"].min(),
+ datetime.now().date(),
+ ],
+ valueFormat="DD/MM/YYYY",
+ w=200, # width
),
- ]
+ ],
),
# Divider for the 2 columns
html.Div(
@@ -233,75 +221,130 @@ def update_hist4(user_selection_species_id):
"gap": "10px",
},
children=[
- html.H3(
+ dmc.Title(
"Carte des observations",
+ order=4,
className="graph-title",
),
- dcc.Dropdown(
- id="column-dropdown",
- options=[
- {"label": col, "value": col} for col in map_columns
+ # Selector for the map column
+ dmc.SegmentedControl(
+ id="map-column-select",
+ value=list(MAP_SETTINGS.keys())[0],
+ data=[
+ {"label": MAP_SETTINGS[col]["title"], "value": col}
+ for col in MAP_SETTINGS
],
- value="nb_species_cat", # Default value
- style={"width": "50%"},
- clearable=False,
+ transitionDuration=500,
),
],
),
- dcc.Graph(
- id="species-map",
- style={
- "width": "100%",
- "display": "inline-block",
- "margin": "5px auto",
- },
+ html.Div(
+ style={"padding": "5px"},
+ children=[
+ dmc.Card(
+ children=[
+ dcc.Graph(
+ id="species-map",
+ figure=fig_map,
+ config={
+ "displaylogo": False, # Remove plotly logo
+ },
+ ),
+ ],
+ withBorder=True,
+ shadow="sm",
+ style={"padding": "0"},
+ ),
+ ],
),
# Divider for the gauge charts, with 3 columns each
html.Div(
- style={
- "display": "flex",
- "gap": "10px"
- },
+ style={"display": "flex", "gap": "10px", "padding": "5px"},
children=[
html.Div(
style={"flex": "1"},
children=[
- html.H3(
- "Degré d'artificialisation",
- className="graph-title",
- style={"textAlign": "center"}
- ),
- dcc.Graph(
- id="gauge-chart1",
- style={"height": "100px"},
+ dmc.Card(
+ children=[
+ dmc.Title(
+ "Degré d'artificialisation",
+ order=4,
+ style={
+ "textAlign": "left",
+ "margin": "0px",
+ "padding": "0px",
+ },
+ ),
+ dcc.Graph(
+ id="gauge-chart1",
+ figure=gauge_chart1,
+ style={"height": "70px"},
+ config={
+ "displayModeBar": False,
+ },
+ ),
+ ],
+ withBorder=True,
+ shadow="sm",
+ style={"padding-top": "5px"},
),
],
),
html.Div(
style={"flex": "1"},
children=[
- html.H3(
- "Pollution acide",
- className="graph-title",
- style={"textAlign": "center"}
- ),
- dcc.Graph(
- id="gauge-chart2",
- style={"height": "100px"},
+ dmc.Card(
+ children=[
+ dmc.Title(
+ "Pollution acide",
+ order=4,
+ style={
+ "textAlign": "left",
+ "margin": "0px",
+ "padding": "0px",
+ },
+ ),
+ dcc.Graph(
+ id="gauge-chart2",
+ figure=gauge_chart2,
+ style={"height": "100px"},
+ config={
+ "displayModeBar": False,
+ },
+ ),
+ ],
+ withBorder=True,
+ shadow="sm",
+ style={"padding-top": "5px"},
),
],
),
html.Div(
style={"flex": "1"},
children=[
- html.H3(
- "Pollution azote",
- className="graph-title",
- style={"textAlign": "center"},
- ),
- dcc.Graph(
- id="gauge-chart3",
- style={"height": "100px"}
+ dmc.Card(
+ children=[
+ dmc.Title(
+ "Pollution azote",
+ order=4,
+ style={
+ "textAlign": "left",
+ "margin": "0px",
+ "padding": "0px",
+ },
+ ),
+ dcc.Graph(
+ id="gauge-chart3",
+ figure=gauge_chart3,
+ style={"height": "100px"},
+ config={
+ "displayModeBar": False,
+ },
+ ),
+ ],
+ withBorder=True,
+ shadow="sm",
+ style={"padding-top": "5px"},
),
],
),
@@ -332,8 +375,9 @@ def update_hist4(user_selection_species_id):
"align-items": "center",
},
children=[
- html.H3(
+ dmc.Title(
"Distribution du nombre d'espèces",
+ order=4,
className="graph-title",
),
dmc.Tooltip(
@@ -349,7 +393,11 @@ def update_hist4(user_selection_species_id):
),
dcc.Graph(
id="species-hist1",
+ figure=hist1_nb_species,
style={"height": "300px"},
+ config={
+ "displaylogo": False, # Remove plotly logo
+ },
),
],
),
@@ -364,8 +412,9 @@ def update_hist4(user_selection_species_id):
"align-items": "center",
},
children=[
- html.H3(
+ dmc.Title(
"Distribution de VDL",
+ order=4,
className="graph-title",
),
dmc.Tooltip(
@@ -381,7 +430,11 @@ def update_hist4(user_selection_species_id):
),
dcc.Graph(
id="vdl-hist2",
+ figure=hist2_vdl,
style={"height": "300px"},
+ config={
+ "displaylogo": False, # Remove plotly logo
+ },
),
],
),
@@ -392,8 +445,9 @@ def update_hist4(user_selection_species_id):
html.Div(
style={"display": "flex", "align-items": "center"},
children=[
- html.H3(
+ dmc.Title(
"Espèces observées sur le site sélectionné",
+ order=4,
className="graph-title",
),
dmc.Tooltip(
@@ -409,7 +463,11 @@ def update_hist4(user_selection_species_id):
),
dcc.Graph(
id="hist3",
+ figure=hist3,
style={"height": "300px"},
+ config={
+ "displaylogo": False, # Remove plotly logo
+ },
),
]
),
@@ -426,8 +484,9 @@ def update_hist4(user_selection_species_id):
[
html.Div(
[
- html.H3(
- "Espèces les plus observées par les observateurs Lichens GO",
+ dmc.Title(
+ "Espèces les plus observées",
+ order=4,
className="graph-title",
),
dmc.Tooltip(
@@ -469,34 +528,75 @@ def update_hist4(user_selection_species_id):
"margin-left": "20px",
},
),
- dcc.Graph(id="hist4", figure=hist4),
+ dcc.Graph(
+ id="hist4",
+ figure=hist4,
+ config={
+ "displaylogo": False, # Remove plotly logo
+ },
+ ),
],
span=8,
- ),
- dmc.GridCol(
- [dcc.Graph(figure={}, id="graph-placeholder")],
- span=4,
- ),
+ )
]
)
+
+# Toggle to switch between light and dark theme
+theme_toggle = dmc.ActionIcon(
+ [
+ dmc.Paper(DashIconify(icon="radix-icons:sun", width=25), darkHidden=True),
+ dmc.Paper(DashIconify(icon="radix-icons:moon", width=25), lightHidden=True),
+ ],
+ variant="transparent",
+ id="color-scheme-toggle",
+ size="lg",
+ ms="auto",
+)
+
+
+# Callback to switch between light and dark theme
+@callback(
+ Output("mantine-provider", "forceColorScheme"),
+ Input("color-scheme-toggle", "n_clicks"),
+ State("mantine-provider", "forceColorScheme"),
+ prevent_initial_call=True,
+)
+def switch_theme(_, theme):
+ return "dark" if (theme == "light" or theme is None) else "light"
+
+
+# Theme for the app
+dmc_theme = {
+ "colors": {
+ "myBlue": BASE_COLOR_PALETTE[::-1], # Reverse the color palette
+ },
+ "primaryColor": "myBlue",
+ "fontFamily": BODY_FONT_FAMILY,
+ "defaultRadius": "md", # Default radius for cards
+}
+
+
# Define the main layout with tabs
app.layout = dmc.MantineProvider(
- [
+ id="mantine-provider",
+ theme=dmc_theme,
+ children=[
dmc.Tabs(
[
dmc.TabsList(
[
dmc.TabsTab("Sites", value="1"),
dmc.TabsTab("Espèces", value="2"),
- ]
+ theme_toggle,
+ ],
),
dmc.TabsPanel(sites_layout, value="1"),
dmc.TabsPanel(species_layout, value="2"),
],
value="1", # Default to the first tab
- )
- ]
+ ),
+ ],
)
diff --git a/Dashboards/my_data/computed_datasets.py b/Dashboards/my_data/computed_datasets.py
index a6b0fbc..8156f88 100644
--- a/Dashboards/my_data/computed_datasets.py
+++ b/Dashboards/my_data/computed_datasets.py
@@ -1,19 +1,14 @@
import pandas as pd
import numpy as np
-
-# chemin_dossier_parent = Path(__file__).parent.parent
-# sys.path.append(str(chemin_dossier_parent))
-
-square_columns = [f'sq{i}' for i in range(1, 6)]
-orientations = ['N', 'E', 'S', 'O']
+from Dashboards.constants import SQUARE_COLUMNS, ORIENTATIONS
# Merge table_df with lichen_df, lichen_species_df and observation_df
def merge_tables(table_df, lichen_df, lichen_species_df, observation_df):
- merged_df = table_df.merge(lichen_df, left_on='lichen_id', right_on='id', suffixes=('', '_l'), how='left')
- merged_df = merged_df.merge(lichen_species_df, left_on='species_id', right_on='id', suffixes=('', '_ls'), how='left')
- merged_df = merged_df.merge(observation_df, left_on='observation_id', right_on='id', suffixes=('', '_o'), how ='left')
+ merged_df = table_df.merge(lichen_df, on='lichen_id', suffixes=('', '_l'), how='left')
+ merged_df = merged_df.merge(lichen_species_df, on='species_id', suffixes=('', '_ls'), how='left')
+ merged_df = merged_df.merge(observation_df, on='observation_id', suffixes=('', '_o'), how ='left')
return merged_df
@@ -25,17 +20,17 @@ def count_lichen(table_df):
table_with_nb_lichen_df = table_df.copy()
# Concatenate all square_columns into a single list per row
- table_with_nb_lichen_df['concatenated_squares'] = table_with_nb_lichen_df[square_columns].sum(axis=1)
+ table_with_nb_lichen_df['concatenated_squares'] = table_with_nb_lichen_df[SQUARE_COLUMNS].sum(axis=1)
# Calculate lichen per orientation
- for orientation in orientations:
- table_with_nb_lichen_df[orientation] = table_with_nb_lichen_df['concatenated_squares'].apply(lambda x, orientation: x.count(orientation))
+ for orientation in ORIENTATIONS:
+ table_with_nb_lichen_df[orientation] = table_with_nb_lichen_df['concatenated_squares'].apply(lambda x, orientation=orientation: x.count(orientation))
# Calculate total number of lichen by summing over all orientations
- table_with_nb_lichen_df["nb_lichen"] = table_with_nb_lichen_df[orientations].sum(axis=1)
+ table_with_nb_lichen_df["nb_lichen"] = table_with_nb_lichen_df[ORIENTATIONS].sum(axis=1)
# Rename the orientations count columns
- table_with_nb_lichen_df.rename(columns={orientation:f'nb_lichen_{orientation}' for orientation in orientations}, inplace=True)
+ table_with_nb_lichen_df.rename(columns={orientation:f'nb_lichen_{orientation}' for orientation in ORIENTATIONS}, inplace=True)
# Drop concatenated_squares column
table_with_nb_lichen_df.drop(columns=['concatenated_squares'], inplace=True)
@@ -45,35 +40,35 @@ def count_lichen(table_df):
def vdl_value(observation_df, table_with_nb_lichen_df):
- columns = ['observation_id'] + [f'nb_lichen_{orientation}' for orientation in orientations] + ['nb_lichen']
+ columns = ['observation_id'] + [f'nb_lichen_{orientation}' for orientation in ORIENTATIONS] + ['nb_lichen']
vdl_df = table_with_nb_lichen_df[columns]
# Calculate the lichen diversity value (VDL) per observation
vdl_df = vdl_df.groupby('observation_id').sum() # Sum over all lichen species per observation
    vdl_df['VDL'] = vdl_df['nb_lichen'] / 15 # /5 for the number of squares per grid, /3 for the number of trees per observation
- vdl_df["VDL_cat"] = pd.cut(vdl_df["VDL"], bins=[-1, 5, 10, 15, np.inf], labels=["<5", "5-10", "10-15", ">15"])
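+    # Upper bound of 4.999 so that a VDL of exactly 5 falls in the "5-10" category rather than "<5" (pd.cut bins are right-inclusive by default)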
+ vdl_df["VDL_cat"] = pd.cut(vdl_df["VDL"], bins=[-1, 4.999, 10, 15, np.inf], labels=["<5", "5-10", "10-15", ">15"])
- observation_with_vdl_df = observation_df.merge(vdl_df, left_on='id', right_on='observation_id', how='left')
+ observation_with_vdl_df = observation_df.merge(vdl_df, on='observation_id', how='left')
return observation_with_vdl_df
"""
Count the number of lichen per lichen ID.
"""
def count_lichen_per_lichen_id(table_with_nb_lichen_df, lichen_df, lichen_species_df):
- # Define the columns to be used for grouping and summing
- columns = ['lichen_id'] + [f'nb_lichen_{orientation}' for orientation in orientations] + ['nb_lichen']
+ # Define the columns to be used for grouping and summing
+ columns = ['lichen_id'] + [f'nb_lichen_{orientation}' for orientation in ORIENTATIONS] + ['nb_lichen']
# Group by 'lichen_id' and sum the counts for each orientation and the total count
- nb_lichen_per_lichen_id_df = table_with_nb_lichen_df[columns].groupby('lichen_id').sum()
+ nb_lichen_per_lichen_id_df = table_with_nb_lichen_df[columns].groupby('lichen_id').sum().reset_index()
# Merge the grouped DataFrame with the lichen DataFrame to add lichen information
- nb_lichen_per_lichen_id_df = lichen_df.merge(nb_lichen_per_lichen_id_df, how='left', left_on='id', right_on='lichen_id')
+ nb_lichen_per_lichen_id_df = nb_lichen_per_lichen_id_df.merge(lichen_df, how='left', on='lichen_id')
# Merge the result with the lichen species DataFrame to add species information
- nb_lichen_per_lichen_id_df = nb_lichen_per_lichen_id_df.merge(lichen_species_df, how='left', left_on='species_id', right_on='id', suffixes=['', '_s'])
+ nb_lichen_per_lichen_id_df = nb_lichen_per_lichen_id_df.merge(lichen_species_df, how='left', on='species_id', suffixes=['', '_s'])
# Sort by observation_id and number of lichen in ascending order
- nb_lichen_per_lichen_id_df = nb_lichen_per_lichen_id_df.sort_values(by=['observation_id','nb_lichen'], ascending=True)
+ nb_lichen_per_lichen_id_df = nb_lichen_per_lichen_id_df.sort_values(by=['observation_id','nb_lichen'], ascending=True, ignore_index=True)
# Rename the repeated lichen names with a unique name
nb_lichen_per_lichen_id_df = unique_lichen_name(nb_lichen_per_lichen_id_df)
@@ -88,7 +83,7 @@ def count_lichen_per_lichen_id(table_with_nb_lichen_df, lichen_df, lichen_specie
def unique_lichen_name(nb_lichen_per_lichen_id_df):
# Filter lichen that are not unique
- non_unique_lichen = nb_lichen_per_lichen_id_df.loc[~nb_lichen_per_lichen_id_df['unique'], ['id', 'observation_id', 'nb_lichen', 'name']]
+ non_unique_lichen = nb_lichen_per_lichen_id_df.loc[~nb_lichen_per_lichen_id_df['unique'], ['lichen_id', 'observation_id', 'nb_lichen', 'name']]
# Sort by 'nb_lichen' in descending order to have suffix _1, _2, _3, etc. for the most frequent lichen
non_unique_lichen = non_unique_lichen.sort_values(by='nb_lichen', ascending=False)
@@ -102,7 +97,7 @@ def unique_lichen_name(nb_lichen_per_lichen_id_df):
non_unique_lichen['unique_name'] = (non_unique_lichen['name'] + " " + suffix)
# Merge unique names with original df
- merged_df = nb_lichen_per_lichen_id_df.merge(non_unique_lichen[['id', 'unique_name']], on='id', how='left')
+ merged_df = nb_lichen_per_lichen_id_df.merge(non_unique_lichen[['lichen_id', 'unique_name']], on='lichen_id', how='left')
# Replace NaN unique names by name
merged_df['unique_name'] = merged_df['unique_name'].combine_first(merged_df['name'])
@@ -134,10 +129,15 @@ def count_species_per_observation(lichen_df, observation_df):
count_species_per_observation_df = lichen_df['observation_id'].value_counts().to_frame().rename(columns={"count":"nb_species"})
# Merge with observation_df
- observation_with_species_count_df = observation_df.merge(count_species_per_observation_df, how='left', left_on='id', right_on='observation_id')
+ observation_with_species_count_df = observation_df.merge(count_species_per_observation_df, how='left', on='observation_id')
# Add a categorical column based on the number of lichen
- observation_with_species_count_df["nb_species_cat"] = pd.cut(observation_with_species_count_df["nb_species"], bins=[-1, 6, 11, 15, np.inf], labels = ["<7", "7-10", "11-14", ">14"])
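+    # right=False makes the bins left-inclusive: [0, 7) -> "<7", [7, 10.5) -> "7-10", [10.5, 15) -> "11-14", [15, inf) -> ">14"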
+ observation_with_species_count_df["nb_species_cat"] = pd.cut(
+ observation_with_species_count_df["nb_species"],
+ bins=[0, 7, 10.5, 15, np.inf],
+ labels = ["<7", "7-10", "11-14", ">14"],
+ right=False
+ )
return observation_with_species_count_df
@@ -170,7 +170,7 @@ def count_lichen_per_species(lichen_df, lichen_species_df):
)
# Merge with species names
- count_lichen_per_species_df = count_lichen_per_species_df.merge(lichen_species_df[['id', 'name']], left_on='species_id', right_on='id').drop(columns='id')
+ count_lichen_per_species_df = count_lichen_per_species_df.merge(lichen_species_df[['species_id', 'name']], on='species_id')
# Sort based on occurrences in descending order
count_lichen_per_species_df = count_lichen_per_species_df.sort_values(by='count', ascending=False).reset_index(drop=True)
@@ -190,9 +190,9 @@ def df_frequency(lichen_df, lichen_species_df, observation_df, table_df, ecology
)
    # Join the table with lichen and observation
- merged_df = table_df.merge(lichen_df, left_on='lichen_id', right_on='id', suffixes=('', '_l'))
- merged_df = merged_df.merge(lichen_species_df, left_on='species_id', right_on='id', suffixes=('', '_ls'))
- merged_df = merged_df.merge(observation_df, left_on='observation_id', right_on='id', suffixes=('', '_o'))
+ merged_df = table_df.merge(lichen_df, on='lichen_id', suffixes=('', '_l'))
+ merged_df = merged_df.merge(lichen_species_df, on='species_id', suffixes=('', '_ls'))
+ merged_df = merged_df.merge(observation_df, on='observation_id', suffixes=('', '_o'))
    # Group by species name and 'observation_id' and sum the frequencies
grouped_df = merged_df.groupby(['name', 'observation_id'])['freq'].sum().reset_index()
@@ -204,9 +204,7 @@ def df_frequency(lichen_df, lichen_species_df, observation_df, table_df, ecology
grouped_df = grouped_df[['observation_id', 'name', 'freq','pH','eutrophication', 'poleotolerance']]
grouped_df = grouped_df.rename(
columns={
- 'observation_id': 'id',
'name': 'lichen',
- 'freq': 'freq',
'pH': 'ph',
'eutrophication': 'eutrophication',
'poleotolerance': 'poleotolerance'
diff --git a/Dashboards/my_data/datasets.py b/Dashboards/my_data/datasets.py
index f731742..8cc0496 100644
--- a/Dashboards/my_data/datasets.py
+++ b/Dashboards/my_data/datasets.py
@@ -130,3 +130,22 @@ def get_lichen_frequency():
"poleotolerance": lichen_frequency.poleotolerance
})
return pd.DataFrame(lichen_frequency_data)
+
+
+def get_useful_data():
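+    """
+    Load all the datasets and rename their 'id' columns (lichen_id, species_id,
+    observation_id, ...) to make the merges between tables easier.
+    """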
+ lichen_df = get_lichen_data()
+ lichen_species_df = get_lichen_species_data()
+ observation_df = get_observation_data()
+ table_df = get_table_data()
+ tree_df = get_tree_data()
+ ecology_df = get_lichen_ecology()
+
+ # Rename the id columns for easier merge
+ lichen_df.rename(columns={'id': 'lichen_id'}, inplace=True)
+ lichen_species_df.rename(columns={'id': 'species_id'}, inplace=True)
+ observation_df.rename(columns={'id': 'observation_id'}, inplace=True)
+ table_df.rename(columns={'id': 'table_id'}, inplace=True)
+ tree_df.rename(columns={'id': 'tree_id'}, inplace=True)
+ ecology_df.rename(columns={'id': 'ecology_id'}, inplace=True)
+
+ return lichen_df, lichen_species_df, observation_df, table_df, tree_df, ecology_df
diff --git a/Dashboards/utils/colors.py b/Dashboards/utils/colors.py
new file mode 100644
index 0000000..1d5cfc3
--- /dev/null
+++ b/Dashboards/utils/colors.py
@@ -0,0 +1,48 @@
+def hex_to_rgb(hex_color):
+ """Convert hex color to RGB."""
+ hex_color = hex_color.lstrip('#')
+ return tuple(int(hex_color[i:i+2], 16) for i in (0, 2, 4))
+
+def rgb_to_hex(rgb_color):
+ """Convert RGB color to hex."""
+ return '#{:02x}{:02x}{:02x}'.format(*rgb_color)
+
+def lighten_color(rgb_color, factor=0.5):
+ """Lighten the color by mixing it with white."""
+ white = (255, 255, 255)
+ return tuple(int((1 - factor) * c + factor * w) for c, w in zip(rgb_color, white, strict=False))
+
+def create_pastel_palette(base_palette, factor=0.5):
+ """Create a pastel version of the base color palette."""
+ pastel_palette = []
+ for hex_color in base_palette:
+ rgb_color = hex_to_rgb(hex_color)
+ pastel_rgb = lighten_color(rgb_color, factor)
+ pastel_hex = rgb_to_hex(pastel_rgb)
+ pastel_palette.append(pastel_hex)
+ return pastel_palette
+
+
+if __name__ == "__main__":
+
+ base_color_palette = [
+ "#333D43",
+ "#37444C",
+ "#3A4C58",
+ "#3C5665",
+ "#3D6176",
+ "#3C6D8C",
+ "#387CA6",
+ "#4A86AB",
+ "#608FAD",
+ "#799AAF",
+ "#90A7B5",
+ "#A6B6BF",
+ "#BDC6CC",
+    ]
+
+ # Create a pastel version of the base color palette
+ pastel_color_palette = create_pastel_palette(base_color_palette, factor=0.7)
+
+ # Print the pastel color palette
+ print("Pastel Color Palette:", pastel_color_palette)
diff --git a/poetry.lock b/poetry.lock
index b3ddf2e..b819e05 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -1,30 +1,5 @@
# This file is automatically @generated by Poetry 1.8.3 and should not be changed by hand.
-[[package]]
-name = "altair"
-version = "5.3.0"
-description = "Vega-Altair: A declarative statistical visualization library for Python."
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "altair-5.3.0-py3-none-any.whl", hash = "sha256:7084a1dab4d83c5e7e5246b92dc1b4451a6c68fd057f3716ee9d315c8980e59a"},
- {file = "altair-5.3.0.tar.gz", hash = "sha256:5a268b1a0983b23d8f9129f819f956174aa7aea2719ed55a52eba9979b9f6675"},
-]
-
-[package.dependencies]
-jinja2 = "*"
-jsonschema = ">=3.0"
-numpy = "*"
-packaging = "*"
-pandas = ">=0.25"
-toolz = "*"
-typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""}
-
-[package.extras]
-all = ["altair-tiles (>=0.3.0)", "anywidget (>=0.9.0)", "pyarrow (>=11)", "vega-datasets (>=0.9.0)", "vegafusion[embed] (>=1.6.6)", "vl-convert-python (>=1.3.0)"]
-dev = ["geopandas", "hatch", "ipython", "m2r", "mypy", "pandas-stubs", "pytest", "pytest-cov", "ruff (>=0.3.0)", "types-jsonschema", "types-setuptools"]
-doc = ["docutils", "jinja2", "myst-parser", "numpydoc", "pillow (>=9,<10)", "pydata-sphinx-theme (>=0.14.1)", "scipy", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinxext-altair"]
-
[[package]]
name = "anyio"
version = "4.6.2"
@@ -921,38 +896,6 @@ files = [
{file = "fqdn-1.5.1.tar.gz", hash = "sha256:105ed3677e767fb5ca086a0c1f4bb66ebc3c100be518f0e0d755d9eae164d89f"},
]
-[[package]]
-name = "gitdb"
-version = "4.0.11"
-description = "Git Object Database"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "gitdb-4.0.11-py3-none-any.whl", hash = "sha256:81a3407ddd2ee8df444cbacea00e2d038e40150acfa3001696fe0dcf1d3adfa4"},
- {file = "gitdb-4.0.11.tar.gz", hash = "sha256:bf5421126136d6d0af55bc1e7c1af1c397a34f5b7bd79e776cd3e89785c2b04b"},
-]
-
-[package.dependencies]
-smmap = ">=3.0.1,<6"
-
-[[package]]
-name = "gitpython"
-version = "3.1.43"
-description = "GitPython is a Python library used to interact with Git repositories"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "GitPython-3.1.43-py3-none-any.whl", hash = "sha256:eec7ec56b92aad751f9912a73404bc02ba212a23adb2c7098ee668417051a1ff"},
- {file = "GitPython-3.1.43.tar.gz", hash = "sha256:35f314a9f878467f5453cc1fee295c3e18e52f1b99f10f6cf5b1682e968a9e7c"},
-]
-
-[package.dependencies]
-gitdb = ">=4.0.1,<5"
-
-[package.extras]
-doc = ["sphinx (==4.3.2)", "sphinx-autodoc-typehints", "sphinx-rtd-theme", "sphinxcontrib-applehelp (>=1.0.2,<=1.0.4)", "sphinxcontrib-devhelp (==1.0.2)", "sphinxcontrib-htmlhelp (>=2.0.0,<=2.0.1)", "sphinxcontrib-qthelp (==1.0.3)", "sphinxcontrib-serializinghtml (==1.1.5)"]
-test = ["coverage[toml]", "ddt (>=1.1.1,!=1.4.3)", "mock", "mypy", "pre-commit", "pytest (>=7.3.1)", "pytest-cov", "pytest-instafail", "pytest-mock", "pytest-sugar", "typing-extensions"]
-
[[package]]
name = "greenlet"
version = "3.0.3"
@@ -1652,30 +1595,6 @@ files = [
{file = "kiwisolver-1.4.5.tar.gz", hash = "sha256:e57e563a57fb22a142da34f38acc2fc1a5c864bc29ca1517a88abc963e60d6ec"},
]
-[[package]]
-name = "markdown-it-py"
-version = "3.0.0"
-description = "Python port of markdown-it. Markdown parsing, done right!"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"},
- {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"},
-]
-
-[package.dependencies]
-mdurl = ">=0.1,<1.0"
-
-[package.extras]
-benchmarking = ["psutil", "pytest", "pytest-benchmark"]
-code-style = ["pre-commit (>=3.0,<4.0)"]
-compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"]
-linkify = ["linkify-it-py (>=1,<3)"]
-plugins = ["mdit-py-plugins"]
-profiling = ["gprof2dot"]
-rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"]
-testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"]
-
[[package]]
name = "markupsafe"
version = "2.1.5"
@@ -1811,17 +1730,6 @@ files = [
[package.dependencies]
traitlets = "*"
-[[package]]
-name = "mdurl"
-version = "0.1.2"
-description = "Markdown URL utilities"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"},
- {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"},
-]
-
[[package]]
name = "mistune"
version = "3.0.2"
@@ -2330,26 +2238,6 @@ files = [
[package.dependencies]
wcwidth = "*"
-[[package]]
-name = "protobuf"
-version = "5.27.1"
-description = ""
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "protobuf-5.27.1-cp310-abi3-win32.whl", hash = "sha256:3adc15ec0ff35c5b2d0992f9345b04a540c1e73bfee3ff1643db43cc1d734333"},
- {file = "protobuf-5.27.1-cp310-abi3-win_amd64.whl", hash = "sha256:25236b69ab4ce1bec413fd4b68a15ef8141794427e0b4dc173e9d5d9dffc3bcd"},
- {file = "protobuf-5.27.1-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4e38fc29d7df32e01a41cf118b5a968b1efd46b9c41ff515234e794011c78b17"},
- {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_aarch64.whl", hash = "sha256:917ed03c3eb8a2d51c3496359f5b53b4e4b7e40edfbdd3d3f34336e0eef6825a"},
- {file = "protobuf-5.27.1-cp38-abi3-manylinux2014_x86_64.whl", hash = "sha256:ee52874a9e69a30271649be88ecbe69d374232e8fd0b4e4b0aaaa87f429f1631"},
- {file = "protobuf-5.27.1-cp38-cp38-win32.whl", hash = "sha256:7a97b9c5aed86b9ca289eb5148df6c208ab5bb6906930590961e08f097258107"},
- {file = "protobuf-5.27.1-cp38-cp38-win_amd64.whl", hash = "sha256:f6abd0f69968792da7460d3c2cfa7d94fd74e1c21df321eb6345b963f9ec3d8d"},
- {file = "protobuf-5.27.1-cp39-cp39-win32.whl", hash = "sha256:dfddb7537f789002cc4eb00752c92e67885badcc7005566f2c5de9d969d3282d"},
- {file = "protobuf-5.27.1-cp39-cp39-win_amd64.whl", hash = "sha256:39309898b912ca6febb0084ea912e976482834f401be35840a008da12d189340"},
- {file = "protobuf-5.27.1-py3-none-any.whl", hash = "sha256:4ac7249a1530a2ed50e24201d6630125ced04b30619262f06224616e0030b6cf"},
- {file = "protobuf-5.27.1.tar.gz", hash = "sha256:df5e5b8e39b7d1c25b186ffdf9f44f40f810bbcc9d2b71d9d3156fee5a9adf15"},
-]
-
[[package]]
name = "psutil"
version = "6.0.0"
@@ -2485,54 +2373,6 @@ files = [
[package.extras]
tests = ["pytest"]
-[[package]]
-name = "pyarrow"
-version = "16.1.0"
-description = "Python library for Apache Arrow"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pyarrow-16.1.0-cp310-cp310-macosx_10_15_x86_64.whl", hash = "sha256:17e23b9a65a70cc733d8b738baa6ad3722298fa0c81d88f63ff94bf25eaa77b9"},
- {file = "pyarrow-16.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:4740cc41e2ba5d641071d0ab5e9ef9b5e6e8c7611351a5cb7c1d175eaf43674a"},
- {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98100e0268d04e0eec47b73f20b39c45b4006f3c4233719c3848aa27a03c1aef"},
- {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f68f409e7b283c085f2da014f9ef81e885d90dcd733bd648cfba3ef265961848"},
- {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_aarch64.whl", hash = "sha256:a8914cd176f448e09746037b0c6b3a9d7688cef451ec5735094055116857580c"},
- {file = "pyarrow-16.1.0-cp310-cp310-manylinux_2_28_x86_64.whl", hash = "sha256:48be160782c0556156d91adbdd5a4a7e719f8d407cb46ae3bb4eaee09b3111bd"},
- {file = "pyarrow-16.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9cf389d444b0f41d9fe1444b70650fea31e9d52cfcb5f818b7888b91b586efff"},
- {file = "pyarrow-16.1.0-cp311-cp311-macosx_10_15_x86_64.whl", hash = "sha256:d0ebea336b535b37eee9eee31761813086d33ed06de9ab6fc6aaa0bace7b250c"},
- {file = "pyarrow-16.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2e73cfc4a99e796727919c5541c65bb88b973377501e39b9842ea71401ca6c1c"},
- {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf9251264247ecfe93e5f5a0cd43b8ae834f1e61d1abca22da55b20c788417f6"},
- {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddf5aace92d520d3d2a20031d8b0ec27b4395cab9f74e07cc95edf42a5cc0147"},
- {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_aarch64.whl", hash = "sha256:25233642583bf658f629eb230b9bb79d9af4d9f9229890b3c878699c82f7d11e"},
- {file = "pyarrow-16.1.0-cp311-cp311-manylinux_2_28_x86_64.whl", hash = "sha256:a33a64576fddfbec0a44112eaf844c20853647ca833e9a647bfae0582b2ff94b"},
- {file = "pyarrow-16.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:185d121b50836379fe012753cf15c4ba9638bda9645183ab36246923875f8d1b"},
- {file = "pyarrow-16.1.0-cp312-cp312-macosx_10_15_x86_64.whl", hash = "sha256:2e51ca1d6ed7f2e9d5c3c83decf27b0d17bb207a7dea986e8dc3e24f80ff7d6f"},
- {file = "pyarrow-16.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06ebccb6f8cb7357de85f60d5da50e83507954af617d7b05f48af1621d331c9a"},
- {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b04707f1979815f5e49824ce52d1dceb46e2f12909a48a6a753fe7cafbc44a0c"},
- {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0d32000693deff8dc5df444b032b5985a48592c0697cb6e3071a5d59888714e2"},
- {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:8785bb10d5d6fd5e15d718ee1d1f914fe768bf8b4d1e5e9bf253de8a26cb1628"},
- {file = "pyarrow-16.1.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:e1369af39587b794873b8a307cc6623a3b1194e69399af0efd05bb202195a5a7"},
- {file = "pyarrow-16.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:febde33305f1498f6df85e8020bca496d0e9ebf2093bab9e0f65e2b4ae2b3444"},
- {file = "pyarrow-16.1.0-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:b5f5705ab977947a43ac83b52ade3b881eb6e95fcc02d76f501d549a210ba77f"},
- {file = "pyarrow-16.1.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0d27bf89dfc2576f6206e9cd6cf7a107c9c06dc13d53bbc25b0bd4556f19cf5f"},
- {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d07de3ee730647a600037bc1d7b7994067ed64d0eba797ac74b2bc77384f4c2"},
- {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fbef391b63f708e103df99fbaa3acf9f671d77a183a07546ba2f2c297b361e83"},
- {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_aarch64.whl", hash = "sha256:19741c4dbbbc986d38856ee7ddfdd6a00fc3b0fc2d928795b95410d38bb97d15"},
- {file = "pyarrow-16.1.0-cp38-cp38-manylinux_2_28_x86_64.whl", hash = "sha256:f2c5fb249caa17b94e2b9278b36a05ce03d3180e6da0c4c3b3ce5b2788f30eed"},
- {file = "pyarrow-16.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:e6b6d3cd35fbb93b70ade1336022cc1147b95ec6af7d36906ca7fe432eb09710"},
- {file = "pyarrow-16.1.0-cp39-cp39-macosx_10_15_x86_64.whl", hash = "sha256:18da9b76a36a954665ccca8aa6bd9f46c1145f79c0bb8f4f244f5f8e799bca55"},
- {file = "pyarrow-16.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:99f7549779b6e434467d2aa43ab2b7224dd9e41bdde486020bae198978c9e05e"},
- {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f07fdffe4fd5b15f5ec15c8b64584868d063bc22b86b46c9695624ca3505b7b4"},
- {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ddfe389a08ea374972bd4065d5f25d14e36b43ebc22fc75f7b951f24378bf0b5"},
- {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_aarch64.whl", hash = "sha256:3b20bd67c94b3a2ea0a749d2a5712fc845a69cb5d52e78e6449bbd295611f3aa"},
- {file = "pyarrow-16.1.0-cp39-cp39-manylinux_2_28_x86_64.whl", hash = "sha256:ba8ac20693c0bb0bf4b238751d4409e62852004a8cf031c73b0e0962b03e45e3"},
- {file = "pyarrow-16.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:31a1851751433d89a986616015841977e0a188662fcffd1a5677453f1df2de0a"},
- {file = "pyarrow-16.1.0.tar.gz", hash = "sha256:15fbb22ea96d11f0b5768504a3f961edab25eaf4197c341720c4a387f6c60315"},
-]
-
-[package.dependencies]
-numpy = ">=1.16.6"
-
[[package]]
name = "pycparser"
version = "2.22"
@@ -2544,25 +2384,6 @@ files = [
{file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"},
]
-[[package]]
-name = "pydeck"
-version = "0.9.1"
-description = "Widget for deck.gl maps"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "pydeck-0.9.1-py2.py3-none-any.whl", hash = "sha256:b3f75ba0d273fc917094fa61224f3f6076ca8752b93d46faf3bcfd9f9d59b038"},
- {file = "pydeck-0.9.1.tar.gz", hash = "sha256:f74475ae637951d63f2ee58326757f8d4f9cd9f2a457cf42950715003e2cb605"},
-]
-
-[package.dependencies]
-jinja2 = ">=2.10.1"
-numpy = ">=1.16.4"
-
-[package.extras]
-carto = ["pydeck-carto"]
-jupyter = ["ipykernel (>=5.1.2)", "ipython (>=5.8.0)", "ipywidgets (>=7,<8)", "traitlets (>=4.3.2)"]
-
[[package]]
name = "pygments"
version = "2.18.0"
@@ -2980,24 +2801,6 @@ files = [
{file = "rfc3986_validator-0.1.1.tar.gz", hash = "sha256:3d44bde7921b3b9ec3ae4e3adca370438eccebc676456449b145d533b240d055"},
]
-[[package]]
-name = "rich"
-version = "13.7.1"
-description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal"
-optional = false
-python-versions = ">=3.7.0"
-files = [
- {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"},
- {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"},
-]
-
-[package.dependencies]
-markdown-it-py = ">=2.2.0"
-pygments = ">=2.13.0,<3.0.0"
-
-[package.extras]
-jupyter = ["ipywidgets (>=7.5.1,<9)"]
-
[[package]]
name = "rpds-py"
version = "0.18.1"
@@ -3169,17 +2972,6 @@ files = [
{file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"},
]
-[[package]]
-name = "smmap"
-version = "5.0.1"
-description = "A pure Python implementation of a sliding window memory map manager"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "smmap-5.0.1-py3-none-any.whl", hash = "sha256:e6d8668fa5f93e706934a62d7b4db19c8d9eb8cf2adbb75ef1b675aa332b69da"},
- {file = "smmap-5.0.1.tar.gz", hash = "sha256:dceeb6c0028fdb6734471eb07c0cd2aae706ccaecab45965ee83f11c8d3b1f62"},
-]
-
[[package]]
name = "sniffio"
version = "1.3.1"
@@ -3308,41 +3100,6 @@ pure-eval = "*"
[package.extras]
tests = ["cython", "littleutils", "pygments", "pytest", "typeguard"]
-[[package]]
-name = "streamlit"
-version = "1.36.0"
-description = "A faster way to build and share data apps"
-optional = false
-python-versions = "!=3.9.7,>=3.8"
-files = [
- {file = "streamlit-1.36.0-py2.py3-none-any.whl", hash = "sha256:3399a33ea5faa26c05dd433d142eefe68ade67e9189a9e1d47a1731ae30a1c42"},
- {file = "streamlit-1.36.0.tar.gz", hash = "sha256:a12af9f0eb61ab5832f438336257b1ec20eb29d8e0e0c6b40a79116ba939bc9c"},
-]
-
-[package.dependencies]
-altair = ">=4.0,<6"
-blinker = ">=1.0.0,<2"
-cachetools = ">=4.0,<6"
-click = ">=7.0,<9"
-gitpython = ">=3.0.7,<3.1.19 || >3.1.19,<4"
-numpy = ">=1.20,<3"
-packaging = ">=20,<25"
-pandas = ">=1.3.0,<3"
-pillow = ">=7.1.0,<11"
-protobuf = ">=3.20,<6"
-pyarrow = ">=7.0"
-pydeck = ">=0.8.0b4,<1"
-requests = ">=2.27,<3"
-rich = ">=10.14.0,<14"
-tenacity = ">=8.1.0,<9"
-toml = ">=0.10.1,<2"
-tornado = ">=6.0.3,<7"
-typing-extensions = ">=4.3.0,<5"
-watchdog = {version = ">=2.1.5,<5", markers = "platform_system != \"Darwin\""}
-
-[package.extras]
-snowflake = ["snowflake-connector-python (>=2.8.0)", "snowflake-snowpark-python (>=0.9.0)"]
-
[[package]]
name = "tenacity"
version = "8.4.2"
@@ -3397,17 +3154,6 @@ webencodings = ">=0.4"
doc = ["sphinx", "sphinx_rtd_theme"]
test = ["pytest", "ruff"]
-[[package]]
-name = "toml"
-version = "0.10.2"
-description = "Python Library for Tom's Obvious, Minimal Language"
-optional = false
-python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*"
-files = [
- {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"},
- {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"},
-]
-
[[package]]
name = "tomli"
version = "2.0.1"
@@ -3419,17 +3165,6 @@ files = [
{file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"},
]
-[[package]]
-name = "toolz"
-version = "0.12.1"
-description = "List processing tools and functional utilities"
-optional = false
-python-versions = ">=3.7"
-files = [
- {file = "toolz-0.12.1-py3-none-any.whl", hash = "sha256:d22731364c07d72eea0a0ad45bafb2c2937ab6fd38a3507bf55eae8744aa7d85"},
- {file = "toolz-0.12.1.tar.gz", hash = "sha256:ecca342664893f177a13dac0e6b41cbd8ac25a358e5f215316d43e2100224f4d"},
-]
-
[[package]]
name = "tornado"
version = "6.4.1"
@@ -3576,50 +3311,6 @@ platformdirs = ">=3.9.1,<5"
docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier (>=0.2.1a0)", "towncrier (>=23.6)"]
test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8)", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10)"]
-[[package]]
-name = "watchdog"
-version = "4.0.1"
-description = "Filesystem events monitoring"
-optional = false
-python-versions = ">=3.8"
-files = [
- {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:da2dfdaa8006eb6a71051795856bedd97e5b03e57da96f98e375682c48850645"},
- {file = "watchdog-4.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e93f451f2dfa433d97765ca2634628b789b49ba8b504fdde5837cdcf25fdb53b"},
- {file = "watchdog-4.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:ef0107bbb6a55f5be727cfc2ef945d5676b97bffb8425650dadbb184be9f9a2b"},
- {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:17e32f147d8bf9657e0922c0940bcde863b894cd871dbb694beb6704cfbd2fb5"},
- {file = "watchdog-4.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:03e70d2df2258fb6cb0e95bbdbe06c16e608af94a3ffbd2b90c3f1e83eb10767"},
- {file = "watchdog-4.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:123587af84260c991dc5f62a6e7ef3d1c57dfddc99faacee508c71d287248459"},
- {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:093b23e6906a8b97051191a4a0c73a77ecc958121d42346274c6af6520dec175"},
- {file = "watchdog-4.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:611be3904f9843f0529c35a3ff3fd617449463cb4b73b1633950b3d97fa4bfb7"},
- {file = "watchdog-4.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:62c613ad689ddcb11707f030e722fa929f322ef7e4f18f5335d2b73c61a85c28"},
- {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:d4925e4bf7b9bddd1c3de13c9b8a2cdb89a468f640e66fbfabaf735bd85b3e35"},
- {file = "watchdog-4.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cad0bbd66cd59fc474b4a4376bc5ac3fc698723510cbb64091c2a793b18654db"},
- {file = "watchdog-4.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:a3c2c317a8fb53e5b3d25790553796105501a235343f5d2bf23bb8649c2c8709"},
- {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c9904904b6564d4ee8a1ed820db76185a3c96e05560c776c79a6ce5ab71888ba"},
- {file = "watchdog-4.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:667f3c579e813fcbad1b784db7a1aaa96524bed53437e119f6a2f5de4db04235"},
- {file = "watchdog-4.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d10a681c9a1d5a77e75c48a3b8e1a9f2ae2928eda463e8d33660437705659682"},
- {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:0144c0ea9997b92615af1d94afc0c217e07ce2c14912c7b1a5731776329fcfc7"},
- {file = "watchdog-4.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:998d2be6976a0ee3a81fb8e2777900c28641fb5bfbd0c84717d89bca0addcdc5"},
- {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:e7921319fe4430b11278d924ef66d4daa469fafb1da679a2e48c935fa27af193"},
- {file = "watchdog-4.0.1-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:f0de0f284248ab40188f23380b03b59126d1479cd59940f2a34f8852db710625"},
- {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:bca36be5707e81b9e6ce3208d92d95540d4ca244c006b61511753583c81c70dd"},
- {file = "watchdog-4.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ab998f567ebdf6b1da7dc1e5accfaa7c6992244629c0fdaef062f43249bd8dee"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_aarch64.whl", hash = "sha256:dddba7ca1c807045323b6af4ff80f5ddc4d654c8bce8317dde1bd96b128ed253"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_armv7l.whl", hash = "sha256:4513ec234c68b14d4161440e07f995f231be21a09329051e67a2118a7a612d2d"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_i686.whl", hash = "sha256:4107ac5ab936a63952dea2a46a734a23230aa2f6f9db1291bf171dac3ebd53c6"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64.whl", hash = "sha256:6e8c70d2cd745daec2a08734d9f63092b793ad97612470a0ee4cbb8f5f705c57"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:f27279d060e2ab24c0aa98363ff906d2386aa6c4dc2f1a374655d4e02a6c5e5e"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_s390x.whl", hash = "sha256:f8affdf3c0f0466e69f5b3917cdd042f89c8c63aebdb9f7c078996f607cdb0f5"},
- {file = "watchdog-4.0.1-py3-none-manylinux2014_x86_64.whl", hash = "sha256:ac7041b385f04c047fcc2951dc001671dee1b7e0615cde772e84b01fbf68ee84"},
- {file = "watchdog-4.0.1-py3-none-win32.whl", hash = "sha256:206afc3d964f9a233e6ad34618ec60b9837d0582b500b63687e34011e15bb429"},
- {file = "watchdog-4.0.1-py3-none-win_amd64.whl", hash = "sha256:7577b3c43e5909623149f76b099ac49a1a01ca4e167d1785c76eb52fa585745a"},
- {file = "watchdog-4.0.1-py3-none-win_ia64.whl", hash = "sha256:d7b9f5f3299e8dd230880b6c55504a1f69cf1e4316275d1b215ebdd8187ec88d"},
- {file = "watchdog-4.0.1.tar.gz", hash = "sha256:eebaacf674fa25511e8867028d281e602ee6500045b57f43b08778082f7f8b44"},
-]
-
-[package.extras]
-watchmedo = ["PyYAML (>=3.10)"]
-
[[package]]
name = "wcwidth"
version = "0.2.13"
@@ -3708,4 +3399,4 @@ test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools",
[metadata]
lock-version = "2.0"
python-versions = "^3.10"
-content-hash = "a9eeca3b126c68bb03b0ffae50eb174ba33e6627661de1bb96cd5ebaf0066dad"
+content-hash = "faf2eaa6733f3ca3851302598fd1053228d388b1a5687afa7520f09eb2b7c7c5"
diff --git a/pyproject.toml b/pyproject.toml
index fe221ff..26c49c2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -17,7 +17,6 @@ plotly = "^5.22.0"
matplotlib = "^3.9.0"
seaborn = "^0.13.2"
dash = "^2.17.1"
-streamlit = "^1.36.0"
sqlalchemy = "^2.0.31"
psycopg2-binary = "^2.9.9"
python-dotenv = "^1.0.1"