From 0dd26a53d886e176694828a29146c98f99841f1f Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Wed, 18 May 2022 17:00:03 +0700 Subject: [PATCH 01/14] removing global variable SUMMARY_ALL --- app.py | 26 +++++++++++++++----------- 1 file changed, 15 insertions(+), 11 deletions(-) diff --git a/app.py b/app.py index d4ad768..a2d5691 100644 --- a/app.py +++ b/app.py @@ -21,9 +21,6 @@ "https://cdn.jsdelivr.net/gh/AnnMarieW/dash-bootstrap-templates@V1.0.4/dbc.min.css" ) -# GLOBAL VARS -SUMMARY_ALL = None - # APP app = dash.Dash( APP_TITLE, @@ -184,7 +181,6 @@ def callback_download_table(_, table_data, table_columns): prevent_initial_call=True, ) def callback_analyze(_, table_data, table_columns): - global SUMMARY_ALL button_viz_analysis_disabled = True button_viz_analysis_outline = True @@ -192,12 +188,12 @@ def callback_analyze(_, table_data, table_columns): try: dataframe = pyfunc.transform_to_dataframe(table_data, table_columns) - SUMMARY_ALL = pyfunc.generate_summary_all(dataframe, n_days=["16D", "MS", "YS"]) + summary_all = pyfunc.generate_summary_all(dataframe, n_days=["16D", "MS", "YS"]) tables = [ pylayoutfunc.create_table_summary( summary, f"table-analyze-{counter}", deletable=False ) - for counter, summary in enumerate(SUMMARY_ALL) + for counter, summary in enumerate(summary_all) ] children = pylayoutfunc.create_tabcard_table_layout(tables) @@ -222,7 +218,11 @@ def callback_analyze(_, table_data, table_columns): ) def callback_download_results(_): - dataframe = pd.concat(SUMMARY_ALL, axis=1, keys=["Biweekly", "Monthly", "Yearly"]) + # TODO: FIX GLOBAL VARIABLE SUMMARY ALL + + summary_all = ... + + dataframe = pd.concat(summary_all, axis=1, keys=["Biweekly", "Monthly", "Yearly"]) return dcc.send_data_frame(dataframe.to_csv, "results.csv") @@ -231,7 +231,7 @@ def callback_download_results(_): Input("button-viz-analysis", "n_clicks"), prevent_initial_call=True, ) -def callback_troubleshoot(_): +def callback_graph_analysis(_): from itertools import product label_periods = ["Biweekly", "Monthly", "Yearly"] @@ -239,6 +239,10 @@ def callback_troubleshoot(_): label_raindry = ["Dry + Rain"] label_ufunc = label_maxsum + label_raindry + # TODO: FIX GLOBAL VARIABLE SUMMARY_ALL + + summary_all = ... 
+ graphs_maxsum = [ pyfigure.figure_summary_maxsum( summary, @@ -246,15 +250,15 @@ def callback_troubleshoot(_): period=period, subplot_titles=["Max", "Sum"], ) - for summary, title, period in zip(SUMMARY_ALL, label_maxsum * 3, label_periods) + for summary, title, period in zip(summary_all, label_maxsum * 3, label_periods) ] graphs_raindry = [ pyfigure.figure_summary_raindry( summary, title=f"{period}: {title}", period=period ) - for summary, title, period in zip(SUMMARY_ALL, label_raindry * 3, label_periods) + for summary, title, period in zip(summary_all, label_raindry * 3, label_periods) ] - graph_maxdate = [pyfigure.figure_summary_maxdate(SUMMARY_ALL)] + graph_maxdate = [pyfigure.figure_summary_maxdate(summary_all)] all_graphs = graphs_maxsum + graphs_raindry + graph_maxdate labels = [": ".join(i) for i in product(label_ufunc, label_periods)] From f5eb5a63d8eb6e1a455de6d460846ff8bfe3ee46 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Thu, 19 May 2022 10:55:00 +0700 Subject: [PATCH 02/14] =?UTF-8?q?=F0=9F=8C=8D=20remove=20global=20variable?= =?UTF-8?q?s?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 77 +++++++++++++++++++++++++++++++++++++++++++++++-------- pyfunc.py | 33 +++++++++++++++++++++--- 2 files changed, 96 insertions(+), 14 deletions(-) diff --git a/app.py b/app.py index a2d5691..eb0c7c7 100644 --- a/app.py +++ b/app.py @@ -214,24 +214,67 @@ def callback_analyze(_, table_data, table_columns): @app.callback( Output("download-analysis-csv", "data"), Input("button-download-analysis-csv", "n_clicks"), + State("table-analyze-0", "data"), + State("table-analyze-0", "columns"), + State("table-analyze-1", "data"), + State("table-analyze-1", "columns"), + State("table-analyze-2", "data"), + State("table-analyze-2", "columns"), prevent_initial_call=True, ) -def callback_download_results(_): +def callback_download_results( + _, + biweekly_data, + biweekly_columns, + monthly_data, + monthly_columns, + yearly_data, + yearly_columns, +): + + biweekly = (biweekly_data, biweekly_columns) + monthly = (monthly_data, monthly_columns) + yearly = (yearly_data, yearly_columns) + + summary_all = [] + for period in (biweekly, monthly, yearly): + data, columns = period + dataframe = pyfunc.transform_to_dataframe( + data, + columns, + multiindex=True, + apply_numeric=False, + parse_dates=["max_date"], + ) + summary_all.append(dataframe) - # TODO: FIX GLOBAL VARIABLE SUMMARY ALL - - summary_all = ... 
+ dataframe_all = pd.concat( + summary_all, axis=1, keys=["Biweekly", "Monthly", "Yearly"] + ) - dataframe = pd.concat(summary_all, axis=1, keys=["Biweekly", "Monthly", "Yearly"]) - return dcc.send_data_frame(dataframe.to_csv, "results.csv") + return dcc.send_data_frame(dataframe_all.to_csv, "results.csv") @app.callback( Output("tab-graph-analysis", "children"), Input("button-viz-analysis", "n_clicks"), + State("table-analyze-0", "data"), + State("table-analyze-0", "columns"), + State("table-analyze-1", "data"), + State("table-analyze-1", "columns"), + State("table-analyze-2", "data"), + State("table-analyze-2", "columns"), prevent_initial_call=True, ) -def callback_graph_analysis(_): +def callback_graph_analysis( + _, + biweekly_data, + biweekly_columns, + monthly_data, + monthly_columns, + yearly_data, + yearly_columns, +): from itertools import product label_periods = ["Biweekly", "Monthly", "Yearly"] @@ -239,10 +282,22 @@ def callback_graph_analysis(_): label_raindry = ["Dry + Rain"] label_ufunc = label_maxsum + label_raindry - # TODO: FIX GLOBAL VARIABLE SUMMARY_ALL - - summary_all = ... - + biweekly = (biweekly_data, biweekly_columns) + monthly = (monthly_data, monthly_columns) + yearly = (yearly_data, yearly_columns) + + summary_all = [] + for summary_period in (biweekly, monthly, yearly): + data, columns = summary_period + dataframe = pyfunc.transform_to_dataframe( + data, + columns, + multiindex=True, + apply_numeric=False, + parse_dates=["max_date"], + ) + summary_all.append(dataframe) + graphs_maxsum = [ pyfigure.figure_summary_maxsum( summary, diff --git a/pyfunc.py b/pyfunc.py index e1eb153..086702b 100644 --- a/pyfunc.py +++ b/pyfunc.py @@ -58,7 +58,7 @@ def max_date(vector): if vector.any(): return vector.idxmax().date() else: - return np.nan + return pd.NaT def max(vector): return vector.max() @@ -70,7 +70,7 @@ def max(vector): dataframe, ufunc=ufunc, ufunc_col=ufunc_col, n_days=n_days ) - return summary + return summary.infer_objects() def generate_summary_all(dataframe, n_days: list = None): @@ -84,10 +84,15 @@ def generate_summary_all(dataframe, n_days: list = None): def transform_to_dataframe( - table_data, table_columns, multiindex: bool = False, apply_numeric: bool = True + table_data, + table_columns, + multiindex: bool = False, + apply_numeric: bool = True, + parse_dates: list = None, ): dataframe = pd.DataFrame(table_data) + if multiindex is True: dataframe.columns = pd.MultiIndex.from_tuples( [item["name"] for item in table_columns] @@ -98,9 +103,31 @@ def transform_to_dataframe( dataframe["DATE"] = pd.to_datetime(dataframe.DATE) dataframe = dataframe.set_index("DATE").sort_index() + if multiindex is True: + # removing date (index.name) from top level multiindex + dataframe.columns = pd.MultiIndex.from_tuples(dataframe.columns.to_flat_index()) + if apply_numeric is True: dataframe = dataframe.apply(pd.to_numeric, errors="coerce") else: dataframe = dataframe.infer_objects() + if parse_dates is not None: + if multiindex: + for col_dates in parse_dates: + col_parsing = [ + col_tuple + for col_tuple in dataframe.columns + if col_dates in col_tuple + ] + for col_dates in col_parsing: + dataframe[col_dates] = pd.to_datetime( + dataframe[col_dates], errors="coerce" + ) + else: + for col_dates in parse_dates: + dataframe[col_dates] = pd.to_datetime( + dataframe[col_dates], errors="coerce" + ) + return dataframe From 6ea6028cdcac2e4b8033eedc08070e79cb198028 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Thu, 19 May 2022 11:18:00 +0700 Subject: [PATCH 03/14] 
=?UTF-8?q?=E2=9C=8A=20fix=20#4?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app_config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app_config.yml b/app_config.yml index 26ddac4..10dfd1d 100644 --- a/app_config.yml +++ b/app_config.yml @@ -12,6 +12,6 @@ TEMPLATE: SHOW_LEGEND_INSIDE: False SHOW_RANGESELECTOR: False -VERSION: v1.0.0 +VERSION: v1.1.0 GITHUB_LINK: https://github.com/taruma/dash-hidrokit-rainfall GITHUB_REPO: taruma/dash-hidrokit-rainfall \ No newline at end of file From 6a850c2905967551f37ca0dceecf71ba71747f66 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Thu, 19 May 2022 11:43:04 +0700 Subject: [PATCH 04/14] =?UTF-8?q?=F0=9F=93=91=20active=20tab=20default=20b?= =?UTF-8?q?ehaviour?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 2 +- pylayoutfunc.py | 20 ++++++++++++++++---- 2 files changed, 17 insertions(+), 5 deletions(-) diff --git a/app.py b/app.py index eb0c7c7..c41b7f2 100644 --- a/app.py +++ b/app.py @@ -319,7 +319,7 @@ def callback_graph_analysis( labels = [": ".join(i) for i in product(label_ufunc, label_periods)] labels += ["Maximum Rainfall Events"] - children = pylayoutfunc.create_tabcard_graph_layout(all_graphs, labels) + children = pylayoutfunc.create_tabcard_graph_layout(all_graphs, labels, active_tab='Maximum Rainfall Events') return children diff --git a/pylayoutfunc.py b/pylayoutfunc.py index 6347a84..fb80eb6 100644 --- a/pylayoutfunc.py +++ b/pylayoutfunc.py @@ -76,7 +76,10 @@ def create_table_summary( def create_tabcard_table_layout( - tables: list, tab_names: list = None, disabled: list = None + tables: list, + tab_names: list = None, + disabled: list = None, + active_tab: str = None, ): disabled = [False] * len(tables) if disabled is None else disabled @@ -88,14 +91,20 @@ def create_tabcard_table_layout( dbc.Card(dbc.CardBody([table]), class_name="my-3"), label=tab_name, disabled=active, + tab_id=tab_name, ) tab.append(_tab) - return dbc.Tabs(tab) + active_tab = tab_names[0] if active_tab is None else active_tab + + return dbc.Tabs(tab, active_tab=active_tab) def create_tabcard_graph_layout( - graphs: list[dcc.Graph], tab_names: list = None, disabled: list = None + graphs: list[dcc.Graph], + tab_names: list = None, + disabled: list = None, + active_tab: str = None, ): disabled = [False] * len(graphs) if disabled is None else disabled @@ -107,10 +116,13 @@ def create_tabcard_graph_layout( dbc.Card(dbc.CardBody([graph]), class_name="my-3"), label=tab_name, disabled=active, + tab_id=tab_name, ) tab.append(_tab) - return dbc.Tabs(tab) + active_tab = tab_names[0] if active_tab is None else active_tab + + return dbc.Tabs(tab, active_tab=active_tab) def create_HTML_alert(alert: dbc.Alert, className: str = "my-2"): From fd5228594284a160b96f5c5de3492a42466a602c Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Thu, 19 May 2022 12:57:37 +0700 Subject: [PATCH 05/14] =?UTF-8?q?=F0=9F=94=A2=20fix=20#5=20ordered=20stati?= =?UTF-8?q?ons?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyfigure.py | 16 ++++++++++------ 1 file changed, 10 insertions(+), 6 deletions(-) diff --git a/pyfigure.py b/pyfigure.py index eea2d7a..59055c1 100644 --- a/pyfigure.py +++ b/pyfigure.py @@ -6,7 +6,7 @@ import plotly.graph_objects as go import pytemplate import pandas as pd -from collections import defaultdict +from collections import defaultdict, OrderedDict from itertools import cycle, islice 
THRESHOLD_SUMMARY = (367 * 8) // 2 @@ -145,8 +145,9 @@ def figure_summary_maxsum( fig.layout.images = [_generate_dict_watermark(n) for n in range(2, rows + 1)] data_dict = defaultdict(list) - - for station in summary.columns.levels[0]: + stations = [station_name for station_name, _ in summary.columns.to_list()] + stations = list(OrderedDict.fromkeys(stations)) + for station in stations: for ufcol, series in summary[station].items(): if ufcol in ufunc_cols: _bar = go.Bar( @@ -268,8 +269,9 @@ def figure_summary_raindry( ) data_dict = defaultdict(list) - - for station in summary.columns.levels[0]: + stations = [station_name for station_name, _ in summary.columns.to_list()] + stations = list(OrderedDict.fromkeys(stations)) + for station in stations: for ufcol, series in summary[station].items(): if ufcol in ufunc_cols + ["n_left"]: if ufcol in ufunc_cols: @@ -398,7 +400,9 @@ def figure_summary_maxdate( all_stat = [] for summary, period in zip(summary_all, periods): - for station in summary.columns.levels[0]: + stations = [station_name for station_name, _ in summary.columns.to_list()] + stations = list(OrderedDict.fromkeys(stations)) + for station in stations: _max = summary[station].dropna(subset=ufunc_col) _max["max_date"] = pd.to_datetime(_max["max_date"]) _max = _max.set_index("max_date")[["max"]] From f7d4364c7e1639f64f8464e511c9d30dca5907d8 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Thu, 19 May 2022 13:25:46 +0700 Subject: [PATCH 06/14] =?UTF-8?q?=F0=9F=9B=A0=EF=B8=8F=20fix=20#8?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyfunc.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/pyfunc.py b/pyfunc.py index 086702b..aa3ae6b 100644 --- a/pyfunc.py +++ b/pyfunc.py @@ -91,14 +91,14 @@ def transform_to_dataframe( parse_dates: list = None, ): - dataframe = pd.DataFrame(table_data) - if multiindex is True: + dataframe = pd.DataFrame(table_data) dataframe.columns = pd.MultiIndex.from_tuples( [item["name"] for item in table_columns] ) else: - dataframe.columns = [item["name"] for item in table_columns] + columns = pd.Index([item["name"] for item in table_columns]) + dataframe = pd.DataFrame(table_data, columns=columns) dataframe["DATE"] = pd.to_datetime(dataframe.DATE) dataframe = dataframe.set_index("DATE").sort_index() From 5d1b3c333c8abb00c82e468c96a3b5ac4e00d850 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Thu, 19 May 2022 14:05:26 +0700 Subject: [PATCH 07/14] =?UTF-8?q?=E2=A4=B4=EF=B8=8F=20increase/remove=20th?= =?UTF-8?q?reshold?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit remove threshold for maximum rainfall events charts and yearly. 
--- pyfigure.py | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/pyfigure.py b/pyfigure.py index 59055c1..4ccd898 100644 --- a/pyfigure.py +++ b/pyfigure.py @@ -129,7 +129,9 @@ def figure_summary_maxsum( ufunc_cols = ["max", "sum"] if ufunc_cols is None else ufunc_cols subplot_titles = ufunc_cols if subplot_titles is None else subplot_titles - if (summary.size > THRESHOLD_SUMMARY) or (summary.index.size > THRESHOLD_XAXES): + if ( + (summary.size > THRESHOLD_SUMMARY) or (summary.index.size > THRESHOLD_XAXES) + ) and (period.lower() != "yearly"): return dcc.Graph( figure=figure_empty("dataset above threshold"), config={"staticPlot": True} ) @@ -244,7 +246,9 @@ def figure_summary_raindry( summary.columns.levels[0] if subplot_titles is None else subplot_titles ) - if (summary.size > THRESHOLD_SUMMARY) or (summary.index.size > THRESHOLD_XAXES): + if ( + (summary.size > THRESHOLD_SUMMARY) or (summary.index.size > THRESHOLD_XAXES) + ) and (period.lower() != "yearly"): return dcc.Graph( figure=figure_empty("dataset above threshold"), config={"staticPlot": True} ) @@ -375,11 +379,6 @@ def figure_summary_maxdate( bubble_sizes: list[int] = None, ): - if summary_all[0].size > THRESHOLD_SUMMARY: - return dcc.Graph( - figure=figure_empty("dataset above threshold"), config={"staticPlot": True} - ) - ufunc_col = ["max_date"] if ufunc_col is None else ufunc_col subplot_titles = ( ["Biweekly", "Monthly", "Yearly"] if subplot_titles is None else subplot_titles @@ -411,7 +410,7 @@ def figure_summary_maxdate( all_df = pd.concat(all_stat, axis=1) - bubble_sizes = [8, 9, 10] if bubble_sizes is None else bubble_sizes + bubble_sizes = [10, 10, 10] if bubble_sizes is None else bubble_sizes data_dict = defaultdict(list) for period, bubble_size in zip(all_df.columns.levels[0], bubble_sizes): @@ -424,6 +423,7 @@ def figure_summary_maxdate( mode="markers", marker_size=series.fillna(0), marker_sizeref=sizeref, + marker_line_width=0, legendgroup=station, legendgrouptitle_text=station, name=f"{period}", From 39ce7c8d8d70e30e7b13f66a4e646a103a71634d Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Fri, 20 May 2022 10:58:39 +0700 Subject: [PATCH 08/14] =?UTF-8?q?=F0=9F=86=95=20new=20feature=20#7=20consi?= =?UTF-8?q?stency=20graph?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 65 ++++++++++++++++++++++++++++++++++++++++++------ pyfigure.py | 61 +++++++++++++++++++++++++++++++++++++++++++++ pyfunc.py | 7 ++++++ pylayout.py | 42 ++++++++++++++++++++++++++++++- pylayoutfunc.py | 22 +++++++++++++--- requirements.txt | 1 + 6 files changed, 186 insertions(+), 12 deletions(-) diff --git a/app.py b/app.py index c41b7f2..bf63e1c 100644 --- a/app.py +++ b/app.py @@ -39,6 +39,7 @@ pylayout.HTML_TITLE, pylayout.HTML_SUBTITLE, pylayout.HTML_ALERT_README, + pylayout.HTML_ALERT_SPONSOR, pylayout.HTML_ROW_BUTTON_UPLOAD, pylayout.HTML_ROW_TABLE, pylayout.HTML_ROW_BUTTON_VIZ, @@ -48,6 +49,7 @@ pylayout.HTML_ROW_TABLE_ANALYZE, pylayout.HTML_ROW_BUTTON_VIZ_ANALYSIS, pylayout.HTML_ROW_GRAPH_ANALYSIS, + pylayout.HTML_ROW_GRAPH_CONSISTENCY, pylayout.HTML_ALERT_CONTRIBUTION, pylayout.HTML_MADEBY, pylayout.HTML_OTHER_PROJECTS, @@ -80,7 +82,7 @@ def callback_upload(content, filename, filedate, _): if ctx.triggered[0]["prop_id"] == "button-skip.n_clicks": dataframe = pd.read_csv( - Path(r"./example_2Y4S_named.csv"), index_col=0, parse_dates=True + Path(r"./example_9Y1S_named.csv"), index_col=0, parse_dates=True ) filename = None filedate = None @@ -91,12 
+93,13 @@ def callback_upload(content, filename, filedate, _): button_viz_outline = True if dataframe is not None: + editable = [False] + [True] * max(1, (len(dataframe.columns) - 1)) children = pylayoutfunc.create_table_layout( dataframe, "output-table", filename=filename, filedate=filedate, - editable=True, + editable=editable, renamable=True, ) upload_disabled = False @@ -188,15 +191,33 @@ def callback_analyze(_, table_data, table_columns): try: dataframe = pyfunc.transform_to_dataframe(table_data, table_columns) + + # SUMMARY summary_all = pyfunc.generate_summary_all(dataframe, n_days=["16D", "MS", "YS"]) - tables = [ + tables_summary = [ pylayoutfunc.create_table_summary( summary, f"table-analyze-{counter}", deletable=False ) for counter, summary in enumerate(summary_all) ] - children = pylayoutfunc.create_tabcard_table_layout(tables) + # CONSISTENCY + consistency = pyfunc.calc_consistency(dataframe) + + _, tables_consistency = pylayoutfunc.create_table_layout( + consistency, "table-consistency", deletable=False + ) + + tables_consistency = [tables_consistency] + + # LAYOUT + tables_all = tables_summary + tables_consistency + tab_names = "Biweekly Monthly Yearly Consistency".split() + + children = pylayoutfunc.create_tabcard_table_layout( + tables_all, tab_names=tab_names + ) + button_viz_analysis_disabled = False button_viz_analysis_outline = False row_button_download_analysis_style = {"visibility": "visible"} @@ -220,6 +241,8 @@ def callback_analyze(_, table_data, table_columns): State("table-analyze-1", "columns"), State("table-analyze-2", "data"), State("table-analyze-2", "columns"), + State("table-consistency", "data"), + State("table-consistency", "columns"), prevent_initial_call=True, ) def callback_download_results( @@ -230,6 +253,8 @@ def callback_download_results( monthly_columns, yearly_data, yearly_columns, + consistency_data, + consistency_columns, ): biweekly = (biweekly_data, biweekly_columns) @@ -248,8 +273,14 @@ def callback_download_results( ) summary_all.append(dataframe) + consistency = pyfunc.transform_to_dataframe(consistency_data, consistency_columns) + stations = consistency.columns.to_list() + consistency.columns = pd.MultiIndex.from_product([stations, [""]]) + dataframe_all = pd.concat( - summary_all, axis=1, keys=["Biweekly", "Monthly", "Yearly"] + summary_all + [consistency], + axis=1, + keys=["Biweekly", "Monthly", "Yearly", "Consistency"], ) return dcc.send_data_frame(dataframe_all.to_csv, "results.csv") @@ -257,6 +288,7 @@ def callback_download_results( @app.callback( Output("tab-graph-analysis", "children"), + Output("tab-graph-consistency", "children"), Input("button-viz-analysis", "n_clicks"), State("table-analyze-0", "data"), State("table-analyze-0", "columns"), @@ -264,6 +296,8 @@ def callback_download_results( State("table-analyze-1", "columns"), State("table-analyze-2", "data"), State("table-analyze-2", "columns"), + State("table-consistency", "data"), + State("table-consistency", "columns"), prevent_initial_call=True, ) def callback_graph_analysis( @@ -274,6 +308,8 @@ def callback_graph_analysis( monthly_columns, yearly_data, yearly_columns, + consistency_data, + consistency_columns, ): from itertools import product @@ -319,9 +355,24 @@ def callback_graph_analysis( labels = [": ".join(i) for i in product(label_ufunc, label_periods)] labels += ["Maximum Rainfall Events"] - children = pylayoutfunc.create_tabcard_graph_layout(all_graphs, labels, active_tab='Maximum Rainfall Events') + children_analysis = pylayoutfunc.create_tabcard_graph_layout( + 
all_graphs, labels, active_tab="Maximum Rainfall Events" + ) + + # CONSISTENCY + + consistency = pyfunc.transform_to_dataframe(consistency_data, consistency_columns) + + graph_consistency = [ + pyfigure.figure_consistency_single(consistency, col=station) + for station in consistency.columns + ] + + children_consistency = pylayoutfunc.create_tabcard_graph_layout( + graph_consistency, consistency.columns + ) - return children + return children_analysis, children_consistency @app.callback( diff --git a/pyfigure.py b/pyfigure.py index 4ccd898..447c397 100644 --- a/pyfigure.py +++ b/pyfigure.py @@ -493,3 +493,64 @@ def update_axis(fig, update, n, axis: str = "x"): data.marker.color = color return dcc.Graph(figure=fig) + + +def figure_consistency_single(consistency: pd.DataFrame, col: str = None) -> go.Figure: + import re + + col = consistency.columns[0] if col is None else col + + new_dataframe = consistency.copy() + new_dataframe["number"] = np.arange(1, len(new_dataframe) + 1) + + fig = px.scatter( + x=new_dataframe.number, + y=new_dataframe[col], + trendline="ols", + trendline_color_override=pytemplate.hktemplate.layout.colorway[1], + ) + + # MODIFIED SCATTER + + _scatter = fig.data[0] + _scatter.mode = "markers+lines" + _scatter.line.dash = "dashdot" + _scatter.line.width = 1 + _scatter.marker.size = 12 + _scatter.marker.symbol = "circle" + _scatter.name = col + _scatter.hovertemplate = ( + f"{col}
<br>%{{y}} mm<br>%{{x}}"
+    )
+
+    # MODIFIED TRENDLINE
+
+    _trendline = fig.data[1]
+    _oldhovertemplate = _trendline.hovertemplate
+
+    if _oldhovertemplate != "":
+        re_pattern = re.compile("<br>(.+)<br>R.+=([0-9.]+)<br>")
+        equation, r2 = re_pattern.findall(_oldhovertemplate)[0]
+        _newtemplate = (
+            "OLS trendline<br>"
+            + f"{equation}<br>"
+            + f"R2: {r2}<br>"
+            + "%{y} mm (trend)<br>
" + + "%{x}" + + "" + ) + _trendline.hovertemplate = _newtemplate + + _trendline.showlegend = True + _trendline.name = "trendline" + + fig.update_layout( + xaxis_title="Date", + yaxis_title="Total Rainfall (mm/year)", + margin=dict(l=0, t=35, b=0, r=0), + xaxis_tickvals=new_dataframe.number, + xaxis_ticktext=new_dataframe.index.year, + yaxis_tickformat=".0f", + ) + + return dcc.Graph(figure=fig) diff --git a/pyfunc.py b/pyfunc.py index aa3ae6b..4ab509b 100644 --- a/pyfunc.py +++ b/pyfunc.py @@ -131,3 +131,10 @@ def transform_to_dataframe( ) return dataframe + + +def calc_consistency(dataframe): + + consistency = dataframe.resample("YS").sum().cumsum() + + return consistency.round() diff --git a/pylayout.py b/pylayout.py index 54a3958..921e52d 100644 --- a/pylayout.py +++ b/pylayout.py @@ -56,7 +56,27 @@ color="warning", ) -HTML_ALERT_README = pylayoutfunc.create_HTML_alert(ALERT_README, className="") +HTML_ALERT_README = pylayoutfunc.create_HTML_alert(ALERT_README, className=None) + +ALERT_SPONSOR = dbc.Alert( + [ + "Terima kasih untuk ", + html.A( + "FIAKO Engineering", + href="https://fiako.co.id", + ), + " yang telah mensponsori versi v1.1.0. Untuk catatan pembaruan bisa dilihat melalui ", + html.A( + "halaman rilis di github", + href="https://github.com/taruma/dash-hidrokit-rainfall/releases/tag/v1.1.0", + ), + ".", + ], + color="info", +) + +HTML_ALERT_SPONSOR = pylayoutfunc.create_HTML_alert(ALERT_SPONSOR, className=None) + DCC_UPLOAD = html.Div( dcc.Upload( @@ -307,6 +327,26 @@ ) ) +HTML_ROW_GRAPH_CONSISTENCY = html.Div( + dbc.Container( + [ + html.H3("Consistency", className="text-center"), + dbc.Row( + dbc.Col( + dcc.Loading( + children=dcc.Graph( + figure=pyfigure.figure_empty(), + config={"staticPlot": True}, + ), + id="tab-graph-consistency", + ), + width={"size": 6, "offset": 3}, + ), + ), + ], + fluid=True, + ) +) _HTML_TROUBLESHOOT = html.Div( dbc.Container( diff --git a/pylayoutfunc.py b/pylayoutfunc.py index fb80eb6..724269b 100644 --- a/pylayoutfunc.py +++ b/pylayoutfunc.py @@ -10,21 +10,35 @@ def create_table_layout( idtable, filename=None, filedate=None, - editable=False, + editable: list | bool = False, deletable=True, renamable=False, ): + from collections.abc import Iterable + new_dataframe = dataframe.rename_axis("DATE").reset_index() new_dataframe.DATE = new_dataframe.DATE.dt.date + + editable = ( + editable + if isinstance(editable, Iterable) + else [editable] * len(new_dataframe.columns) + ) + table = dash_table.DataTable( id=idtable, columns=[ - {"name": i, "id": i, "deletable": deletable, "renamable": renamable} - for i in new_dataframe.columns + { + "name": i, + "id": i, + "deletable": deletable, + "renamable": renamable, + "editable": edit_col, + } + for i, edit_col in zip(new_dataframe.columns, editable) ], data=new_dataframe.to_dict("records"), page_size=20, - editable=editable, cell_selectable=True, filter_action="native", sort_action="native", diff --git a/requirements.txt b/requirements.txt index dd3cb4b..2915d45 100644 --- a/requirements.txt +++ b/requirements.txt @@ -6,5 +6,6 @@ pandas==1.4.2 plotly==5.7.0 python_box==6.0.2 requests==2.27.1 +statsmodels pyyaml gunicorn From f8502249afe212ee16a742712267fbf013e9700e Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Fri, 20 May 2022 13:47:20 +0700 Subject: [PATCH 09/14] =?UTF-8?q?=F0=9F=94=A7=20correction=20cumulative=20?= =?UTF-8?q?and=20consistency?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 76 
+++++++++++++++++++++++++++++++++-------------------- pyfigure.py | 69 +++++++++++++++++++++++++++++++++++++++++++++--- pyfunc.py | 2 +- pylayout.py | 30 ++++++++++++++++++--- 4 files changed, 141 insertions(+), 36 deletions(-) diff --git a/app.py b/app.py index bf63e1c..95c1bf1 100644 --- a/app.py +++ b/app.py @@ -49,6 +49,7 @@ pylayout.HTML_ROW_TABLE_ANALYZE, pylayout.HTML_ROW_BUTTON_VIZ_ANALYSIS, pylayout.HTML_ROW_GRAPH_ANALYSIS, + pylayout.HTML_ROW_GRAPH_CUMSUM, pylayout.HTML_ROW_GRAPH_CONSISTENCY, pylayout.HTML_ALERT_CONTRIBUTION, pylayout.HTML_MADEBY, @@ -201,18 +202,18 @@ def callback_analyze(_, table_data, table_columns): for counter, summary in enumerate(summary_all) ] - # CONSISTENCY - consistency = pyfunc.calc_consistency(dataframe) + # CUMUMLATIVE SUM + cumsum = pyfunc.calc_cumsum(dataframe) - _, tables_consistency = pylayoutfunc.create_table_layout( - consistency, "table-consistency", deletable=False + _, table_cumsum = pylayoutfunc.create_table_layout( + cumsum, "table-cumsum", deletable=False ) - tables_consistency = [tables_consistency] + table_cumsum = [table_cumsum] # LAYOUT - tables_all = tables_summary + tables_consistency - tab_names = "Biweekly Monthly Yearly Consistency".split() + tables_all = tables_summary + table_cumsum + tab_names = "Biweekly Monthly Yearly Cumulative".split() children = pylayoutfunc.create_tabcard_table_layout( tables_all, tab_names=tab_names @@ -241,8 +242,8 @@ def callback_analyze(_, table_data, table_columns): State("table-analyze-1", "columns"), State("table-analyze-2", "data"), State("table-analyze-2", "columns"), - State("table-consistency", "data"), - State("table-consistency", "columns"), + State("table-cumsum", "data"), + State("table-cumsum", "columns"), prevent_initial_call=True, ) def callback_download_results( @@ -253,8 +254,8 @@ def callback_download_results( monthly_columns, yearly_data, yearly_columns, - consistency_data, - consistency_columns, + cumsum_data, + cumsum_columns, ): biweekly = (biweekly_data, biweekly_columns) @@ -273,14 +274,14 @@ def callback_download_results( ) summary_all.append(dataframe) - consistency = pyfunc.transform_to_dataframe(consistency_data, consistency_columns) - stations = consistency.columns.to_list() - consistency.columns = pd.MultiIndex.from_product([stations, [""]]) + cumsum = pyfunc.transform_to_dataframe(cumsum_data, cumsum_columns) + stations = cumsum.columns.to_list() + cumsum.columns = pd.MultiIndex.from_product([stations, [""]]) dataframe_all = pd.concat( - summary_all + [consistency], + summary_all + [cumsum], axis=1, - keys=["Biweekly", "Monthly", "Yearly", "Consistency"], + keys=["Biweekly", "Monthly", "Yearly", "Cumulative"], ) return dcc.send_data_frame(dataframe_all.to_csv, "results.csv") @@ -288,6 +289,7 @@ def callback_download_results( @app.callback( Output("tab-graph-analysis", "children"), + Output("tab-graph-cumsum", "children"), Output("tab-graph-consistency", "children"), Input("button-viz-analysis", "n_clicks"), State("table-analyze-0", "data"), @@ -296,8 +298,8 @@ def callback_download_results( State("table-analyze-1", "columns"), State("table-analyze-2", "data"), State("table-analyze-2", "columns"), - State("table-consistency", "data"), - State("table-consistency", "columns"), + State("table-cumsum", "data"), + State("table-cumsum", "columns"), prevent_initial_call=True, ) def callback_graph_analysis( @@ -308,8 +310,8 @@ def callback_graph_analysis( monthly_columns, yearly_data, yearly_columns, - consistency_data, - consistency_columns, + cumsum_data, + cumsum_columns, ): 
from itertools import product @@ -359,20 +361,38 @@ def callback_graph_analysis( all_graphs, labels, active_tab="Maximum Rainfall Events" ) - # CONSISTENCY + # CUMSUM - consistency = pyfunc.transform_to_dataframe(consistency_data, consistency_columns) + cumsum = pyfunc.transform_to_dataframe(cumsum_data, cumsum_columns) - graph_consistency = [ - pyfigure.figure_consistency_single(consistency, col=station) - for station in consistency.columns + graph_cumsum = [ + pyfigure.figure_cumsum_single(cumsum, col=station) for station in cumsum.columns ] - children_consistency = pylayoutfunc.create_tabcard_graph_layout( - graph_consistency, consistency.columns + children_cumsum = pylayoutfunc.create_tabcard_graph_layout( + graph_cumsum, cumsum.columns ) - return children_analysis, children_consistency + # CONSISTENCY + + if cumsum.columns.size == 1: + children_consistency = ( + dcc.Graph( + figure=pyfigure.figure_empty(text="Not Available for Single Station"), + config={"staticPlot": True}, + ), + ) + else: + graph_consistency = [ + pyfigure.figure_consistency(cumsum, col=station) + for station in cumsum.columns + ] + + children_consistency = pylayoutfunc.create_tabcard_graph_layout( + graph_consistency, cumsum.columns + ) + + return children_analysis, children_cumsum, children_consistency @app.callback( diff --git a/pyfigure.py b/pyfigure.py index 447c397..4c3068a 100644 --- a/pyfigure.py +++ b/pyfigure.py @@ -495,12 +495,12 @@ def update_axis(fig, update, n, axis: str = "x"): return dcc.Graph(figure=fig) -def figure_consistency_single(consistency: pd.DataFrame, col: str = None) -> go.Figure: +def figure_cumsum_single(cumsum: pd.DataFrame, col: str = None) -> go.Figure: import re - col = consistency.columns[0] if col is None else col + col = cumsum.columns[0] if col is None else col - new_dataframe = consistency.copy() + new_dataframe = cumsum.copy() new_dataframe["number"] = np.arange(1, len(new_dataframe) + 1) fig = px.scatter( @@ -546,7 +546,7 @@ def figure_consistency_single(consistency: pd.DataFrame, col: str = None) -> go. fig.update_layout( xaxis_title="Date", - yaxis_title="Total Rainfall (mm/year)", + yaxis_title="Cumulative Annual (mm)", margin=dict(l=0, t=35, b=0, r=0), xaxis_tickvals=new_dataframe.number, xaxis_ticktext=new_dataframe.index.year, @@ -554,3 +554,64 @@ def figure_consistency_single(consistency: pd.DataFrame, col: str = None) -> go. ) return dcc.Graph(figure=fig) + + +def figure_consistency(cumsum: pd.DataFrame, col: str) -> go.Figure: + import re + + cumsum = cumsum.copy() + + # Create Mean Cumulative Other Stations + cumsum_y = cumsum[col] + other_stations = cumsum.columns.drop(col) + cumsum_x = cumsum[other_stations].mean(axis=1).cumsum() + + fig = px.scatter( + x=cumsum_x, + y=cumsum_y, + trendline="ols", + trendline_color_override=pytemplate.hktemplate.layout.colorway[1], + ) + + # MODIFIED SCATTER + + _scatter = fig.data[0] + _scatter.mode = "markers+lines" + _scatter.line.dash = "dashdot" + _scatter.line.width = 1 + _scatter.marker.size = 12 + _scatter.marker.symbol = "circle" + _scatter.name = col + _scatter.hovertemplate = ( + f"{col}
<br>y: %{{y}} mm<br>x: %{{x}}<br>"
+    )
+
+    # MODIFIED TRENDLINE
+
+    _trendline = fig.data[1]
+    _oldhovertemplate = _trendline.hovertemplate
+
+    if _oldhovertemplate != "":
+        re_pattern = re.compile("<br>(.+)<br>R.+=([0-9.]+)<br>")
+        equation, r2 = re_pattern.findall(_oldhovertemplate)[0]
+        _newtemplate = (
+            "OLS trendline<br>"
+            + f"{equation}<br>"
+            + f"R2: {r2}<br>"
+            + "%{y} mm (trend)<br>
" + + "%{x}" + + "" + ) + _trendline.hovertemplate = _newtemplate + + _trendline.showlegend = True + _trendline.name = "trendline" + + fig.update_layout( + xaxis_title="Cumulative Average Annual Other Stations (mm)", + yaxis_title=f"Cumulative Average Annual (mm)", + margin=dict(l=0, t=35, b=0, r=0), + yaxis_tickformat=".0f", + ) + + return dcc.Graph(figure=fig) diff --git a/pyfunc.py b/pyfunc.py index 4ab509b..f203ff0 100644 --- a/pyfunc.py +++ b/pyfunc.py @@ -133,7 +133,7 @@ def transform_to_dataframe( return dataframe -def calc_consistency(dataframe): +def calc_cumsum(dataframe): consistency = dataframe.resample("YS").sum().cumsum() diff --git a/pylayout.py b/pylayout.py index 921e52d..4bdfb8b 100644 --- a/pylayout.py +++ b/pylayout.py @@ -324,13 +324,36 @@ id="tab-graph-analysis", ), fluid=True, - ) + ), + className="my-3", +) + +HTML_ROW_GRAPH_CUMSUM = html.Div( + dbc.Container( + [ + html.H3("Total Cumulative Annual", className="text-center"), + dbc.Row( + dbc.Col( + dcc.Loading( + children=dcc.Graph( + figure=pyfigure.figure_empty(), + config={"staticPlot": True}, + ), + id="tab-graph-cumsum", + ), + width={"size": 6, "offset": 3}, + ), + ), + ], + fluid=True, + ), + className="my-3", ) HTML_ROW_GRAPH_CONSISTENCY = html.Div( dbc.Container( [ - html.H3("Consistency", className="text-center"), + html.H3("Consistency (Double Mass Curve)", className="text-center"), dbc.Row( dbc.Col( dcc.Loading( @@ -345,7 +368,8 @@ ), ], fluid=True, - ) + ), + className="my-3", ) _HTML_TROUBLESHOOT = html.Div( From 918316ea9ac302009a99f8c3a5f7f53046cb7986 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Fri, 20 May 2022 15:07:09 +0700 Subject: [PATCH 10/14] =?UTF-8?q?=F0=9F=8F=97=EF=B8=8F=20fix=20upload=20da?= =?UTF-8?q?tatable=20and=20typo?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 5 +++-- pyfigure.py | 3 ++- 2 files changed, 5 insertions(+), 3 deletions(-) diff --git a/app.py b/app.py index 95c1bf1..2cc2a77 100644 --- a/app.py +++ b/app.py @@ -1,3 +1,4 @@ +from click import edit import dash import dash_bootstrap_components as dbc import pandas as pd @@ -83,7 +84,7 @@ def callback_upload(content, filename, filedate, _): if ctx.triggered[0]["prop_id"] == "button-skip.n_clicks": dataframe = pd.read_csv( - Path(r"./example_9Y1S_named.csv"), index_col=0, parse_dates=True + Path(r"./example_2Y4S_named.csv"), index_col=0, parse_dates=True ) filename = None filedate = None @@ -94,7 +95,7 @@ def callback_upload(content, filename, filedate, _): button_viz_outline = True if dataframe is not None: - editable = [False] + [True] * max(1, (len(dataframe.columns) - 1)) + editable = [False] + [True] * len(dataframe.columns) children = pylayoutfunc.create_table_layout( dataframe, "output-table", diff --git a/pyfigure.py b/pyfigure.py index 4c3068a..8c1dc49 100644 --- a/pyfigure.py +++ b/pyfigure.py @@ -599,7 +599,7 @@ def figure_consistency(cumsum: pd.DataFrame, col: str) -> go.Figure: + f"{equation}
" + f"R2: {r2}
" + "%{y} mm (trend)
" - + "%{x}" + + "%{x} mm" + "" ) _trendline.hovertemplate = _newtemplate @@ -612,6 +612,7 @@ def figure_consistency(cumsum: pd.DataFrame, col: str) -> go.Figure: yaxis_title=f"Cumulative Average Annual (mm)", margin=dict(l=0, t=35, b=0, r=0), yaxis_tickformat=".0f", + xaxis_tickformat=".0f", ) return dcc.Graph(figure=fig) From 68d77c7cacd55608e27060c2ae77387e73aecc79 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Fri, 20 May 2022 15:39:32 +0700 Subject: [PATCH 11/14] =?UTF-8?q?=F0=9F=96=BC=EF=B8=8F=20new=20theme?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app_config.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/app_config.yml b/app_config.yml index 10dfd1d..ae64172 100644 --- a/app_config.yml +++ b/app_config.yml @@ -4,7 +4,7 @@ DASH_APP: DEBUG: False DASH_THEME: - THEME: SIMPLEX + THEME: COSMO TEMPLATE: LOGO_SOURCE: https://raw.githubusercontent.com/hidrokit/static-assets/main/logo_0.4.0-v1.1/hidrokit-hidrokit/50x50square.png From dea86099ba60c6a5ff1f397b7a49edd1adcbec41 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Fri, 20 May 2022 15:48:20 +0700 Subject: [PATCH 12/14] =?UTF-8?q?=F0=9F=8C=80=20fix=20typo?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- pyfigure.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyfigure.py b/pyfigure.py index 8c1dc49..64609b6 100644 --- a/pyfigure.py +++ b/pyfigure.py @@ -583,7 +583,7 @@ def figure_consistency(cumsum: pd.DataFrame, col: str) -> go.Figure: _scatter.marker.symbol = "circle" _scatter.name = col _scatter.hovertemplate = ( - f"{col}
<br>y: %{{y}} mm<br>x: %{{x}}<br>"
+        f"{col}<br>y: %{{y}} mm<br>x: %{{x}} mm<br>
" ) # MODIFIED TRENDLINE From c38c2c34f1bb73b5f5bacc6b5cf4d5ec1c59e411 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Fri, 20 May 2022 15:48:38 +0700 Subject: [PATCH 13/14] =?UTF-8?q?=F0=9F=86=99=20update=20v1.1.0?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- README.md | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 2752b43..b42b891 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,8 @@ -# Dashboard Rainfall Data Explorer +# hidrokit Rainfall Data Explorer (hidrokit-RDE) -![image](https://user-images.githubusercontent.com/1007910/167613715-7b3db12e-47e5-4d43-8765-19ac3551ed46.png) +![image](https://user-images.githubusercontent.com/1007910/169490220-d0b5a944-fa36-452b-b7e1-fcf5e04e415b.png) -__Rainfall Data Explorer__ (`hkrainfall`) adalah _dashboard_ untuk mengeksplorasi data hujan di setiap stasiunnya dan membandingkannya baik secara numerik maupun visual. `hkrainfall` dibuat menggunakan teknologi [Dash + Plotly](https://plotly.com/) dengan bahasa pemrograman Python. Proyek `hkrainfall` bersifat _open-source_ dengan lisensi MIT. +__Rainfall Data Explorer__ (`hkrainfall` / `hidrokit-RDE`) adalah _dashboard_ untuk mengeksplorasi data hujan di setiap stasiunnya dan membandingkannya baik secara numerik maupun visual. `hkrainfall` dibuat menggunakan teknologi [Dash + Plotly](https://plotly.com/) dengan bahasa pemrograman Python. Proyek `hkrainfall` bersifat _open-source_ dengan lisensi MIT. ## Cara Menjalankan Dashboard (Lokal) @@ -29,6 +29,7 @@ Navigasi dashboard ini antara lain: - Setelah tabel sudah dikoreksi. Bisa dilanjutkan ke tahapan analisis data. Perlu diingat, data yang dianalisis sesuai dengan tampilan/informasi tabel terkini. Jadi, jika masih ada filter, maka analisis hanya dilakukan pada data yang telah terfilter. - Klik "Analyze Data" untuk melakukan analisis data. Perlu diingat, proses ini akan memakan waktu jika memiliki dataset yang besar. Jadi, sangat disarankan menggunakan mesin lokal untuk proses ini. Karena yang dapat diakses di web hanya berupa demonstrasi saja dan menggunakan layanan gratis sehingga sangat terbatas kemampuannya. - Analisis data terbagi menjadi tiga periode yaitu 2 periode (biweekly), setiap bulanan (monthly), dan tahunan (yearly). Sebagai catatan, biweekly itu dibagi berdasarkan 16 hari pertama kemudian sisa harinya pada setiap bulan. +- __Baru di `v1.1.0`__: Analisis konsistensi (kurva massa ganda) dan kumulatif hujan tahunan. - Analisis data yang dilakukan berupa: - `days`: Jumlah hari pada setiap periodenya (16 hari untuk biweekly, 1 bulan untuk monthly, dan 1 tahun untuk yearly). - `max`: Nilai maksimum pada setiap periode. @@ -43,6 +44,7 @@ Navigasi dashboard ini antara lain: - Group Bar Chart untuk setiap periode dengan kolom `max` dan `sum`. Grafik ini bisa melihat secara langsung perbandingan nilai antar stasiun. - Stack Bar Chart untuk setiap periode dengan kolom `n_rain` dan `n_dry`. Grafik ini bisa memberikan gambaran periode yang memiliki frekuensi hujan/kekeringan tinggi/rendah secara sekilas. - Bubble Chart (Maximum Rainfall Events) memberikan gambaran besar terkait tanggal kejadian saat hujan maksimum terjadi di setiap stasiun. Ukuran lingkaran menunjukkan seberapa besar hujan maksimum yang terjadi. + - __Baru di `v1.1.0`__: Ditambahkan grafik kumulatif hujan tahunan dan konsistensi (kurva massa ganda) untuk setiap stasiun. 
Navigasi dengan grafik interaktif plotly: From 064be051aef134e45dd7d45962a20356fa115eb4 Mon Sep 17 00:00:00 2001 From: taruma sakti Date: Fri, 20 May 2022 15:54:19 +0700 Subject: [PATCH 14/14] =?UTF-8?q?=F0=9F=97=91=EF=B8=8F=20delete=20typo?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- app.py | 1 - 1 file changed, 1 deletion(-) diff --git a/app.py b/app.py index 2cc2a77..c50c62e 100644 --- a/app.py +++ b/app.py @@ -1,4 +1,3 @@ -from click import edit import dash import dash_bootstrap_components as dbc import pandas as pd
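
Note on the consistency feature (PATCH 08-09/14): `calc_cumsum()` builds cumulative annual totals with `resample("YS").sum().cumsum()`, and `figure_consistency()` plots one station's cumulative total against a mean built from the remaining stations (a double mass curve). The sketch below is an independent illustration of that idea using invented station names and random rainfall data; it is not an extract of the app's code, and a plain least-squares fit stands in for the OLS trendline that plotly express draws in the app.

```python
# Double-mass-curve sketch (illustration only; station names and data are invented).
import numpy as np
import pandas as pd

# Daily rainfall for three hypothetical stations.
dates = pd.date_range("2010-01-01", "2014-12-31", freq="D")
rng = np.random.default_rng(42)
rainfall = pd.DataFrame(
    rng.gamma(shape=0.4, scale=12.0, size=(len(dates), 3)),
    index=dates,
    columns=["STA A", "STA B", "STA C"],
)

# As in calc_cumsum(): annual totals, accumulated down the years.
cumsum = rainfall.resample("YS").sum().cumsum().round()

# Double mass curve for one station: its cumulative annual total (y) against
# the mean cumulative annual total of the remaining stations (x).
station = "STA A"
y = cumsum[station]
x = cumsum[cumsum.columns.drop(station)].mean(axis=1)

# A consistent record plots close to a straight line; a marked change in slope
# suggests an inhomogeneity (e.g. a gauge relocation) in the station's record.
slope, intercept = np.polyfit(x, y, 1)
print(f"{station}: slope = {slope:.3f}, intercept = {intercept:.1f} mm")
```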