diff --git a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/main_qqplot_for_big_shapes.py b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/main_qqplot_for_big_shapes.py
index 4f89912a8dba1fe7d792b238c0a38f29e00e9286..3109e7ea594c265c373534f755701b17aeb230d0 100644
--- a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/main_qqplot_for_big_shapes.py
+++ b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/main_qqplot_for_big_shapes.py
@@ -17,6 +17,8 @@ def get_tuple_ordered_by_shape(fast=False):
                                                                  select_only_acceptable_shape_parameter=False,
                                                                  multiprocessing=True,
                                                                  save_to_file=True,
+                                                                 fit_gev_only_on_non_null_maxima=False,
+                                                                 fit_only_time_series_with_ninety_percent_of_non_null_values=False,
                                                                  show=False)
                               for altitude in altitudes}
     # Extract all the values
diff --git a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/plot_qqplot.py b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/plot_qqplot.py
index 58abc401897271cdb6302b215a01b5c765f35513..848a048fa5d5daad7d5f54df77ca90d86323177b 100644
--- a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/plot_qqplot.py
+++ b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/plot_qqplot.py
@@ -7,6 +7,8 @@
 import pandas as pd
 from matplotlib.ticker import PercentFormatter
 from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal
+from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \
+    ALL_ALTITUDES_WITHOUT_NAN
 from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \
     TemporalMarginFitMethod
 from extreme_fit.model.result_from_model_fit.result_from_extremes.abstract_extract_eurocode_return_level import \
@@ -183,20 +185,21 @@ if __name__ == '__main__':
     300 Haut_Var-Haut_Verdon 0.8446498197950775
     """

-    altitudes = [300, 600, 900, 1200, 1500, 1800][:]
+    altitudes = [300, 600, 900, 1200, 1500, 1800][:3]
     # altitudes = ALL_ALTITUDES_WITHOUT_NAN
     # altitudes = [900, 1800, 2700]

     altitude_to_visualizer = {altitude: StudyVisualizerForNonStationaryTrends(CrocusSnowLoadTotal(altitude=altitude),
                                                                               select_only_acceptable_shape_parameter=True,
                                                                               fit_method=TemporalMarginFitMethod.extremes_fevd_mle,
                                                                               multiprocessing=True,
+                                                                              fit_gev_only_on_non_null_maxima=True,
                                                                               save_to_file=True,
                                                                               show=False)
                               for altitude in altitudes}

     # plot_qqplot_wrt_standard_gumbel(altitude_to_visualizer)
-    # plot_hist_psnow(altitude_to_visualizer)
-    plot_intensity_against_gumbel_quantile_for_time_series_with_missing_zeros(altitude_to_visualizer, nb_worst_examples=None)
+    plot_hist_psnow(altitude_to_visualizer)
+    # plot_intensity_against_gumbel_quantile_for_time_series_with_missing_zeros(altitude_to_visualizer, nb_worst_examples=None)
     # plot_exceedance_psnow(altitude_to_visualizer)
     # non_stationarity_psnow(altitude_to_visualizer)
diff --git a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/study_visualizer_for_fit_witout_maximum.py b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/study_visualizer_for_fit_witout_maximum.py
index ea904aba278c90747d1a09f004399aad77aee3a5..2ce354fc7a4baa4118cedfc0b0b43c201f6c7ae7 100644
--- a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/study_visualizer_for_fit_witout_maximum.py
+++ b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/study_visualizer_for_fit_witout_maximum.py
@@ -18,7 +18,7 @@ class StudyVisualizerForFitWithoutMaximum(StudyVisualizerForNonStationaryTrends)

     @cached_property
     def massif_name_to_maximum_index_for_non_null_values(self) -> Tuple[Dict, Dict]:
-        d = super().massif_name_to_non_null_years_and_maxima
+        d = super().massif_name_to_years_and_maxima_for_model_fitting
         d_without_maximum = {}
         d_maximum = {}
         for m, (years, maxima) in d.items():
@@ -35,7 +35,7 @@ class StudyVisualizerForFitWithoutMaximum(StudyVisualizerForNonStationaryTrends)
         return self.massif_name_to_maximum_index_for_non_null_values[1]

     @cached_property
-    def massif_name_to_non_null_years_and_maxima(self):
+    def massif_name_to_years_and_maxima_for_model_fitting(self):
         return self.massif_name_to_maximum_index_for_non_null_values[0]

     def maximum_value_test(self):
diff --git a/papers/exceeding_snow_loads/study_visualizer_for_non_stationary_trends.py b/papers/exceeding_snow_loads/study_visualizer_for_non_stationary_trends.py
index 724c8063e024f28f98af2c1d020ff1fd2be62c19..56582008cd0c38d2eda1d61381c5503d728c77d5 100644
--- a/papers/exceeding_snow_loads/study_visualizer_for_non_stationary_trends.py
+++ b/papers/exceeding_snow_loads/study_visualizer_for_non_stationary_trends.py
@@ -52,12 +52,17 @@ class StudyVisualizerForNonStationaryTrends(StudyVisualizer):
                  relative_change_trend_plot=True, non_stationary_trend_test_to_marker=None,
                  fit_method=TemporalMarginFitMethod.extremes_fevd_mle,
-                 select_only_acceptable_shape_parameter=True):
+                 select_only_acceptable_shape_parameter=True,
+                 fit_gev_only_on_non_null_maxima=True,
+                 fit_only_time_series_with_ninety_percent_of_non_null_values=True,
+                 ):
         super().__init__(study, show, save_to_file, only_one_graph, only_first_row, vertical_kde_plot,
                          year_for_kde_plot, plot_block_maxima_quantiles, temporal_non_stationarity,
                          transformation_class, verbose, multiprocessing, complete_non_stationary_trend_analysis,
                          normalization_under_one_observations, score_class)
         # Add some attributes
+        self.fit_only_time_series_with_ninety_percent_of_non_null_values = fit_only_time_series_with_ninety_percent_of_non_null_values
+        self.fit_gev_only_on_non_null_maxima = fit_gev_only_on_non_null_maxima
         self.select_only_acceptable_shape_parameter = select_only_acceptable_shape_parameter
         self.fit_method = fit_method
         self.non_stationary_trend_test_to_marker = non_stationary_trend_test_to_marker
@@ -107,15 +112,22 @@ class StudyVisualizerForNonStationaryTrends(StudyVisualizer):
     def massif_name_to_eurocode_quantile_level_in_practice(self):
         """Due to missing data, the the eurocode quantile which 0.98 if we have all the data
         correspond in practice to the quantile psnow x 0.98 of the data where there is snow"""
-        return {m: 1 - ((1 - EUROCODE_QUANTILE) / p_snow) for m, p_snow in self.massif_name_to_psnow.items()}
+        if self.fit_gev_only_on_non_null_maxima:
+            return {m: 1 - ((1 - EUROCODE_QUANTILE) / p_snow) for m, p_snow in self.massif_name_to_psnow.items()}
+        else:
+            return {m: EUROCODE_QUANTILE for m in self.massif_name_to_psnow.keys()}

     @cached_property
-    def massif_name_to_non_null_years_and_maxima(self):
-        # return self.massif_name_to_years_and_maxima
-        d = {}
-        for m, (years, maxima) in self.massif_name_to_years_and_maxima.items():
-            mask = np.nonzero(maxima)
-            d[m] = (years[mask], maxima[mask])
+    def massif_name_to_years_and_maxima_for_model_fitting(self):
+        if self.fit_gev_only_on_non_null_maxima:
+            d = {}
+            for m, (years, maxima) in self.massif_name_to_years_and_maxima.items():
+                mask = np.nonzero(maxima)
+                d[m] = (years[mask], maxima[mask])
+        else:
+            d = self.massif_name_to_years_and_maxima
+        if self.fit_only_time_series_with_ninety_percent_of_non_null_values:
+            d = {m: v for m, v in d.items() if self.massif_name_to_psnow[m] >= 0.90}
         return d

     @property
@@ -137,7 +149,7 @@ class StudyVisualizerForNonStationaryTrends(StudyVisualizer):
         massif_name_to_trend_test_that_minimized_aic = {}
         massif_name_to_stationary_trend_test_that_minimized_aic = {}
         massif_name_to_gumbel_trend_test_that_minimized_aic = {}
-        for massif_name, (x, y) in self.massif_name_to_non_null_years_and_maxima.items():
+        for massif_name, (x, y) in self.massif_name_to_years_and_maxima_for_model_fitting.items():
             quantile_level = self.massif_name_to_eurocode_quantile_level_in_practice[massif_name]
             all_trend_test = [
                 t(years=x, maxima=y, starting_year=starting_year, quantile_level=quantile_level,
@@ -317,7 +329,7 @@ class StudyVisualizerForNonStationaryTrends(StudyVisualizer):
             -> Dict[str, EurocodeConfidenceIntervalFromExtremes]:
         # Compute for the uncertainty massif names
         arguments = [
-            [self.massif_name_to_non_null_years_and_maxima[m],
+            [self.massif_name_to_years_and_maxima_for_model_fitting[m],
              self.massif_name_and_model_subset_to_model_class(m, model_subset_for_uncertainty),
              ci_method, self.effective_temporal_covariate,
              self.massif_name_to_eurocode_quantile_level_in_practice[m]
@@ -353,7 +365,7 @@ class StudyVisualizerForNonStationaryTrends(StudyVisualizer):

     # Some checks with Gelman convergence diagnosis
     def massif_name_to_gelman_convergence_value(self, mcmc_iterations, model_class, nb_chains):
-        arguments = [(self.massif_name_to_non_null_years_and_maxima[m], mcmc_iterations, model_class, nb_chains)
+        arguments = [(self.massif_name_to_years_and_maxima_for_model_fitting[m], mcmc_iterations, model_class, nb_chains)
                      for m in self.uncertainty_massif_names]
         if self.multiprocessing:
             with Pool(NB_CORES) as p:
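
Note (not part of the patch): the new fit_gev_only_on_non_null_maxima flag gates the quantile-level correction described in the docstring of massif_name_to_eurocode_quantile_level_in_practice. The sketch below restates that correction in isolation so the change is easier to review. EUROCODE_QUANTILE = 0.98 matches the value cited in that docstring; the helper name quantile_level_in_practice and the example p_snow value are introduced here only for illustration and do not exist in the codebase.

# Minimal sketch of the quantile-level correction gated by fit_gev_only_on_non_null_maxima.
# EUROCODE_QUANTILE = 0.98 follows the docstring in the patch; p_snow below is illustrative.

EUROCODE_QUANTILE = 0.98


def quantile_level_in_practice(p_snow, fit_gev_only_on_non_null_maxima=True):
    """Quantile to target on the sample actually used for the GEV fit.

    When null (zero) annual maxima are dropped before fitting, the 0.98 quantile
    of all years corresponds to the 1 - (1 - 0.98) / p_snow quantile of the
    non-null years, where p_snow is the fraction of years with a non-null maximum.
    Otherwise the Eurocode quantile is used unchanged.
    """
    if fit_gev_only_on_non_null_maxima:
        return 1 - (1 - EUROCODE_QUANTILE) / p_snow
    return EUROCODE_QUANTILE


if __name__ == '__main__':
    # With snow in 90% of the years, 0.98 overall corresponds to ~0.9778 among snowy years.
    print(round(quantile_level_in_practice(p_snow=0.9), 4))  # 0.9778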