From 7fea48fadeded89a2d00ac16d6b4b2ddfa00eb9e Mon Sep 17 00:00:00 2001 From: Le Roux Erwan <erwan.le-roux@irstea.fr> Date: Mon, 21 Oct 2019 16:13:56 +0200 Subject: [PATCH] [EUROCODE][DRAWING PER REGION] modify test region eurocode. add test_gev temporal bayesian. add stationary version for the Bayesian. refactor utils at the root, to root_utils to avoid weird import issues. --- .../departement_alpes_francaises.py | 16 ++- experiment/eurocode_data/eurocode_drawing.py | 0 experiment/eurocode_data/eurocode_region.py | 4 + .../eurocode_return_level_uncertainties.py | 79 +++++++++++++ .../eurocode_data/eurocode_visualizer.py | 82 ++++++++----- .../eurocode_data/main_eurocode_drawing.py | 63 ++++++++++ .../massif_name_to_departement.py | 16 +-- experiment/eurocode_data/utils.py | 5 + .../abstract_extended_study.py | 2 +- .../scm_models_data/abstract_study.py | 2 +- .../scm_models_data/crocus/taline_data.py | 2 +- .../abstract_hypercube_visualizer.py | 2 +- .../altitude_hypercube_visualizer.py | 2 +- .../main_fast_hypercube_several_altitudes.py | 7 +- .../main_files/main_full_hypercube.py | 7 +- .../main_files/main_poster_IMSC2019.py | 2 +- .../utils_hypercube.py | 2 +- .../studies_visualizer.py | 2 +- .../main_study_visualizer.py | 2 +- .../study_visualization/study_visualizer.py | 43 ++++++- .../stations_data/comparison_analysis.py | 2 +- .../stations_data/main_spatial_comparison.py | 2 +- .../comparisons_visualization.py | 2 +- experiment/robustness_plot/single_plot.py | 2 +- experiment/simulation/abstract_simulation.py | 2 +- .../trend_analysis/non_stationary_trends.py | 2 +- .../abstract_gev_trend_test.py | 40 ++----- .../univariate_test_results.py | 44 +++++++ .../trend_analysis/univariate_test/utils.py | 61 ++++------ .../distribution/gev/main_fevd_fixed.R | 1 + .../abstract_margin_estimator.py | 4 +- extreme_fit/estimator/utils.py | 6 +- .../abstract_temporal_linear_margin_model.py | 3 +- .../abstract_margin_function.py | 2 +- 
.../margin_model/parametric_margin_model.py | 4 +- .../abstract_max_stable_model.py | 4 +- .../result_from_extremes.py | 44 ++++++- .../result_from_ismev.py | 27 +---- .../model/result_from_model_fit/utils.py | 23 ++++ extreme_fit/model/utils.py | 30 +++-- utils.py => root_utils.py | 0 .../coordinates/abstract_coordinates.py | 4 +- .../alps_station_3D_coordinates.py | 2 +- .../alps_precipitation_observations.py | 2 +- test/test_experiment/test_SCM_study.py | 2 +- .../test_coordinate_sensitivity.py | 2 +- test/test_experiment/test_region_eurocode.py | 23 ++-- .../test_gev/test_gev_temporal_bayesian.py | 111 +++++++----------- 48 files changed, 526 insertions(+), 265 deletions(-) create mode 100644 experiment/eurocode_data/eurocode_drawing.py create mode 100644 experiment/eurocode_data/eurocode_return_level_uncertainties.py create mode 100644 experiment/eurocode_data/main_eurocode_drawing.py create mode 100644 experiment/eurocode_data/utils.py create mode 100644 experiment/trend_analysis/univariate_test/univariate_test_results.py rename utils.py => root_utils.py (100%) diff --git a/experiment/eurocode_data/departement_alpes_francaises.py b/experiment/eurocode_data/departement_alpes_francaises.py index 79d005be..e6028a8c 100644 --- a/experiment/eurocode_data/departement_alpes_francaises.py +++ b/experiment/eurocode_data/departement_alpes_francaises.py @@ -1,12 +1,20 @@ from enum import Enum from experiment.eurocode_data.eurocode_region import AbstractEurocodeRegion, E, C2, C1 +from root_utils import get_display_name_from_object_type class AbstractDepartementAlpesFrancaises(object): - def __init__(self, region: type): - self.region = region() # type: AbstractEurocodeRegion + def __init__(self, eurocode_region: type): + self.eurocode_region = eurocode_region() # type: AbstractEurocodeRegion + + def display_limit(self, ax): + pass + + def __str__(self): + return get_display_name_from_object_type(type(self)) + '( {} Eurocode region)'.format( + 
get_display_name_from_object_type(type(self.eurocode_region))) class HauteSavoie(AbstractDepartementAlpesFrancaises): @@ -32,6 +40,7 @@ class Drome(AbstractDepartementAlpesFrancaises): def __init__(self): super().__init__(C2) + class HautesAlpes(AbstractDepartementAlpesFrancaises): def __init__(self): @@ -48,6 +57,3 @@ class AlpesDeHauteProvence(AbstractDepartementAlpesFrancaises): def __init__(self): super().__init__(C1) - - - diff --git a/experiment/eurocode_data/eurocode_drawing.py b/experiment/eurocode_data/eurocode_drawing.py new file mode 100644 index 00000000..e69de29b diff --git a/experiment/eurocode_data/eurocode_region.py b/experiment/eurocode_data/eurocode_region.py index 2b70fdd6..722cc33d 100644 --- a/experiment/eurocode_data/eurocode_region.py +++ b/experiment/eurocode_data/eurocode_region.py @@ -40,6 +40,10 @@ class AbstractEurocodeRegion(object): def lois_de_variation_1000_and_2000(self): return 3.5, -2.45 + def plot_max_loading(self, ax, altitudes): + ax.plot(altitudes, [self.eurocode_max_loading(altitude) for altitude in altitudes], + label='Eurocode limit') + class C1(AbstractEurocodeRegion): diff --git a/experiment/eurocode_data/eurocode_return_level_uncertainties.py b/experiment/eurocode_data/eurocode_return_level_uncertainties.py new file mode 100644 index 00000000..1e416a8a --- /dev/null +++ b/experiment/eurocode_data/eurocode_return_level_uncertainties.py @@ -0,0 +1,79 @@ +from typing import List + + +import numpy as np +from cached_property import cached_property + +from experiment.eurocode_data.utils import EUROCODE_QUANTILE +from experiment.trend_analysis.univariate_test.utils import load_temporal_coordinates_and_dataset, \ + fitted_linear_margin_estimator +from extreme_fit.distribution.gev.gev_params import GevParams +from extreme_fit.estimator.abstract_estimator import AbstractEstimator +from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator +from extreme_fit.estimator.utils import 
load_margin_function +from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \ + AbstractTemporalLinearMarginModel +from extreme_fit.model.margin_model.margin_function.linear_margin_function import LinearMarginFunction +from extreme_fit.model.result_from_model_fit.result_from_extremes import ResultFromExtremes + + +class ExtractEurocodeReturnLevelFromExtremes(object): + + def __init__(self, estimator: LinearMarginEstimator, year_of_interest: int = 2017): + self.estimator = estimator + self.result_from_fit = self.estimator.result_from_model_fit # type: ResultFromExtremes + self.year_of_interest = year_of_interest + + @property + def margin_functions_from_fit(self) -> List[LinearMarginFunction]: + margin_functions = [] + for _, s in self.result_from_fit.df_posterior_samples.iterrows(): + coef_dict = self.result_from_fit.get_coef_dict_from_posterior_sample(s) + margin_function = load_margin_function(self.estimator, self.estimator.margin_model, coef_dict=coef_dict) + margin_functions.append(margin_function) + return margin_functions + + @property + def gev_params_from_fit_for_year_of_interest(self) -> List[GevParams]: + return [m.get_gev_params(coordinate=np.array([self.year_of_interest]), is_transformed=False) + for m in self.margin_functions_from_fit] + + @cached_property + def posterior_eurocode_return_level_samples_for_year_of_interest(self) -> np.ndarray: + """We divide by 100 to transform the snow water equivalent into snow load""" + return np.array([p.quantile(EUROCODE_QUANTILE) for p in self.gev_params_from_fit_for_year_of_interest]) / 100 + + @property + def posterior_mean_eurocode_return_level_for_the_year_of_interest(self) -> np.ndarray: + return np.mean(self.posterior_eurocode_return_level_samples_for_year_of_interest) + + @property + def posterior_eurocode_return_level_uncertainty_interval_for_the_year_of_interest(self): + bottom_and_upper_quantile = (0.05, 0.95) + return 
[np.quantile(self.posterior_eurocode_return_level_samples_for_year_of_interest, q=q) + for q in bottom_and_upper_quantile] + + +class EurocodeLevelUncertaintyFromExtremes(object): + + YEAR_OF_INTEREST = 2017 + + def __init__(self, posterior_mean, poster_uncertainty_interval): + self.posterior_mean = posterior_mean + self.poster_uncertainty_interval = poster_uncertainty_interval + + @classmethod + def from_estimator_extremes(cls, estimator_extremes: LinearMarginEstimator): + extractor = ExtractEurocodeReturnLevelFromExtremes(estimator_extremes, cls.YEAR_OF_INTEREST) + return cls(extractor.posterior_mean_eurocode_return_level_for_the_year_of_interest, + extractor.posterior_eurocode_return_level_uncertainty_interval_for_the_year_of_interest) + + @classmethod + def from_maxima_years_model_class(cls, maxima, years, model_class): + # Load coordinates and dataset + coordinates, dataset = load_temporal_coordinates_and_dataset(maxima, years) + # Fitted estimator + fitted_estimator = fitted_linear_margin_estimator(model_class, coordinates, dataset, starting_year=1958, + fit_method=AbstractTemporalLinearMarginModel.EXTREMES_FEVD_BAYESIAN_FIT_METHOD_STR) + # Load object from result from extremes + return cls.from_estimator_extremes(fitted_estimator) \ No newline at end of file diff --git a/experiment/eurocode_data/eurocode_visualizer.py b/experiment/eurocode_data/eurocode_visualizer.py index 387a8042..b5231235 100644 --- a/experiment/eurocode_data/eurocode_visualizer.py +++ b/experiment/eurocode_data/eurocode_visualizer.py @@ -1,31 +1,57 @@ -from collections import OrderedDict +from typing import Dict, List + import matplotlib.pyplot as plt -from experiment.eurocode_data.massif_name_to_departement import massif_name_to_departement_objects -from experiment.eurocode_data.eurocode_region import AbstractEurocodeRegion -from utils import get_display_name_from_object_type - - -def display_region_limit(region_type, altitudes, ordered_massif_name_to_quantiles, - 
ordered_massif_name_to_significances=None, - display=True): - assert isinstance(ordered_massif_name_to_quantiles, OrderedDict) - assert ordered_massif_name_to_significances is None or isinstance(ordered_massif_name_to_significances, OrderedDict) - # First, select massif name correspond to the region - massif_name_belong_to_the_region = [] - for massif_name in ordered_massif_name_to_quantiles.keys(): - if any([isinstance(dep.region, region_type) for dep in massif_name_to_departement_objects[massif_name]]): - massif_name_belong_to_the_region.append(massif_name) - region_object = region_type() # type: AbstractEurocodeRegion - # Then, display the limit for the region - fig, ax = plt.subplots(1, 1) - ax.plot(altitudes, [region_object.eurocode_max_loading(altitude) for altitude in altitudes], label='Eurocode limit') - # Finally, display the massif curve - for massif_name in massif_name_belong_to_the_region: - ax.plot(altitudes, ordered_massif_name_to_quantiles[massif_name], label=massif_name) - ax.set_title('{} Eurocode region'.format(get_display_name_from_object_type(region_type))) - ax.set_xlabel('Altitude') - ax.set_ylabel('0.98 quantile (in N $m^-2$)') - ax.legend() - if display: +from experiment.eurocode_data.eurocode_return_level_uncertainties import EurocodeLevelUncertaintyFromExtremes +from experiment.eurocode_data.massif_name_to_departement import DEPARTEMENT_TYPES +from experiment.eurocode_data.utils import EUROCODE_QUANTILE, EUROCODE_ALTITUDES +from experiment.meteo_france_data.scm_models_data.visualization.utils import create_adjusted_axes + + +def plot_model_name_to_dep_to_ordered_return_level_uncertainties( + dep_to_model_name_to_ordered_return_level_uncertainties, show=True): + # Create a 9 x 9 plot + axes = create_adjusted_axes(3, 3) + axes = list(axes.flatten()) + ax6 = axes[5] + ax9 = axes[8] + + axes.remove(ax6) + axes.remove(ax9) + ax_to_departement = dict(zip(axes, DEPARTEMENT_TYPES[::-1])) + for ax, departement in ax_to_departement.items(): + 
plot_dep_to_model_name_dep_to_ordered_return_level_uncertainties(ax, departement, + dep_to_model_name_to_ordered_return_level_uncertainties[ + departement] + ) + ax6.remove() + ax9.remove() + + plt.suptitle('50-year return levels for all French Alps departements. \n' + 'Comparison between the maximum EUROCODE in the departement\n' + 'and the maximum return level found for the massif belonging to the departement') + if show: plt.show() + + +def plot_dep_to_model_name_dep_to_ordered_return_level_uncertainties(ax, dep_class, + model_name_to_ordered_return_level_uncertainties: + Dict[str, List[ + EurocodeLevelUncertaintyFromExtremes]]): + colors = ['red', 'blue', 'green'] + altitudes = EUROCODE_ALTITUDES + alpha = 0.2 + # Display the EUROCODE return level + dep_object = dep_class() + dep_object.eurocode_region.plot_max_loading(ax, altitudes=altitudes) + # Display the return level from model class + for color, (model_name, ordered_return_level_uncertaines) in zip(colors, + model_name_to_ordered_return_level_uncertainties.items()): + mean = [r.posterior_mean for r in ordered_return_level_uncertaines] + ax.plot(altitudes, mean, '-', color=color) + lower_bound = [r.poster_uncertainty_interval[0] for r in ordered_return_level_uncertaines] + upper_bound = [r.poster_uncertainty_interval[1] for r in ordered_return_level_uncertaines] + ax.fill_between(altitudes, lower_bound, upper_bound, color=color, alpha=alpha) + ax.set_title(str(dep_object)) + ax.set_ylabel('Maximum {} quantile (in N $m^-2$)'.format(EUROCODE_QUANTILE)) + ax.set_xlabel('Altitude') diff --git a/experiment/eurocode_data/main_eurocode_drawing.py b/experiment/eurocode_data/main_eurocode_drawing.py new file mode 100644 index 00000000..569cb9a8 --- /dev/null +++ b/experiment/eurocode_data/main_eurocode_drawing.py @@ -0,0 +1,63 @@ +from collections import OrderedDict + +from experiment.eurocode_data.eurocode_visualizer import plot_model_name_to_dep_to_ordered_return_level_uncertainties +from 
experiment.eurocode_data.massif_name_to_departement import DEPARTEMENT_TYPES +from experiment.eurocode_data.utils import EUROCODE_ALTITUDES +from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSwe3Days +from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \ + AltitudeHypercubeVisualizer +from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \ + load_altitude_visualizer +from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_models import StationaryStationModel, \ + NonStationaryLocationAndScaleModel +from root_utils import get_display_name_from_object_type + + +# Model class + + +def dep_to_ordered_return_level_uncertainties(model_class, last_year_for_the_data): + model_name = get_display_name_from_object_type(type(model_class)) + ' ' + str(last_year_for_the_data) + # Load altitude visualizer + # todo: add last years attributes that enables to change the years + altitude_visualizer = load_altitude_visualizer(AltitudeHypercubeVisualizer, altitudes=EUROCODE_ALTITUDES, + last_starting_year=None, nb_data_reduced_for_speed=False, + only_first_one=False, save_to_file=False, + exact_starting_year=1958, + first_starting_year=None, + study_classes=[CrocusSwe3Days], + trend_test_class=None) + # Loop on the data + assert isinstance(altitude_visualizer.tuple_to_study_visualizer, OrderedDict) + dep_to_ordered_return_level_uncertainty = {dep: [] for dep in DEPARTEMENT_TYPES} + for visualizer in altitude_visualizer.tuple_to_study_visualizer.values(): + dep_to_return_level_uncertainty = visualizer.dep_class_to_eurocode_level_uncertainty(model_class) + for dep, return_level_uncertainty in dep_to_return_level_uncertainty.items(): + dep_to_ordered_return_level_uncertainty[dep].append(return_level_uncertainty) + + return {model_name: dep_to_ordered_return_level_uncertainty} + + +def main_drawing(): + 
model_class_and_last_year = [ + (StationaryStationModel, 1991), + (StationaryStationModel, 2017), + (NonStationaryLocationAndScaleModel, 2017), + ][:1] + model_name_to_dep_to_ordered_return_level = {} + for model_class, last_year_for_the_data in model_class_and_last_year: + model_name_to_dep_to_ordered_return_level.update( + dep_to_ordered_return_level_uncertainties(model_class, last_year_for_the_data)) + # Transform the dictionary into the desired format + dep_to_model_name_to_ordered_return_level_uncertainties = {} + for dep in DEPARTEMENT_TYPES: + d2 = {model_name: model_name_to_dep_to_ordered_return_level[model_name][dep] for model_name in + model_name_to_dep_to_ordered_return_level.keys()} + dep_to_model_name_to_ordered_return_level_uncertainties[dep] = d2 + # Plot graph + plot_model_name_to_dep_to_ordered_return_level_uncertainties( + dep_to_model_name_to_ordered_return_level_uncertainties, show=True) + + +if __name__ == '__main__': + main_drawing() diff --git a/experiment/eurocode_data/massif_name_to_departement.py b/experiment/eurocode_data/massif_name_to_departement.py index 2e585248..d059be32 100644 --- a/experiment/eurocode_data/massif_name_to_departement.py +++ b/experiment/eurocode_data/massif_name_to_departement.py @@ -3,8 +3,6 @@ from typing import Dict, List from experiment.eurocode_data.departement_alpes_francaises import HauteSavoie, Savoie, Isere, Drome, HautesAlpes, \ AlpesDeHauteProvence, AlpesMaritimes, AbstractDepartementAlpesFrancaises - - massif_name_to_departement_types = { 'Chablais': [HauteSavoie], 'Aravis': [HauteSavoie, Savoie], @@ -30,17 +28,15 @@ massif_name_to_departement_types = { 'Haut_Var-Haut_Verdon': [AlpesDeHauteProvence], 'Mercantour': [AlpesMaritimes, AlpesDeHauteProvence]} -massif_name_to_departement_objects = {m: [d() for d in deps] for m, deps in massif_name_to_departement_types.items()} # type: Dict[str, List[AbstractDepartementAlpesFrancaises]] +massif_name_to_departement_objects = {m: [d() for d in deps] for m, deps in 
+ massif_name_to_departement_types.items()} # type: Dict[str, List[AbstractDepartementAlpesFrancaises]] DEPARTEMENT_TYPES = [HauteSavoie, Savoie, Isere, Drome, HautesAlpes, AlpesMaritimes, AlpesDeHauteProvence] -departement_type_to_massif_names = {dep: [k for k, v in massif_name_to_departement_types.items() if dep in v] - for dep in DEPARTEMENT_TYPES -} +dep_class_to_massif_names = {dep: [k for k, v in massif_name_to_departement_types.items() if dep in v] + for dep in DEPARTEMENT_TYPES + } if __name__ == '__main__': - for k, v in departement_type_to_massif_names.items(): + for k, v in dep_class_to_massif_names.items(): print(k, v) - - - diff --git a/experiment/eurocode_data/utils.py b/experiment/eurocode_data/utils.py new file mode 100644 index 00000000..ef3b88eb --- /dev/null +++ b/experiment/eurocode_data/utils.py @@ -0,0 +1,5 @@ + +# Eurocode quantile correspond to a 50 year return period +EUROCODE_QUANTILE = 0.98 +# Altitudes (between low and mid altitudes) < 2000m +EUROCODE_ALTITUDES = [900, 1200, 1500, 1800][:2] \ No newline at end of file diff --git a/experiment/meteo_france_data/scm_models_data/abstract_extended_study.py b/experiment/meteo_france_data/scm_models_data/abstract_extended_study.py index b832fe11..d9c8c9e5 100644 --- a/experiment/meteo_france_data/scm_models_data/abstract_extended_study.py +++ b/experiment/meteo_france_data/scm_models_data/abstract_extended_study.py @@ -4,7 +4,7 @@ from collections import OrderedDict from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy from spatio_temporal_dataset.coordinates.spatial_coordinates.abstract_spatial_coordinates import \ AbstractSpatialCoordinates -from utils import classproperty +from root_utils import classproperty class AbstractExtendedStudy(AbstractStudy): diff --git a/experiment/meteo_france_data/scm_models_data/abstract_study.py b/experiment/meteo_france_data/scm_models_data/abstract_study.py index a10ac33e..b16ead03 100644 --- 
a/experiment/meteo_france_data/scm_models_data/abstract_study.py +++ b/experiment/meteo_france_data/scm_models_data/abstract_study.py @@ -30,7 +30,7 @@ from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoo from spatio_temporal_dataset.coordinates.spatial_coordinates.abstract_spatial_coordinates import \ AbstractSpatialCoordinates from spatio_temporal_dataset.spatio_temporal_observations.annual_maxima_observations import AnnualMaxima -from utils import get_full_path, cached_property, NB_CORES, classproperty +from root_utils import get_full_path, cached_property, NB_CORES, classproperty f = io.StringIO() with redirect_stdout(f): diff --git a/experiment/meteo_france_data/scm_models_data/crocus/taline_data.py b/experiment/meteo_france_data/scm_models_data/crocus/taline_data.py index 183b0818..8de3a72f 100644 --- a/experiment/meteo_france_data/scm_models_data/crocus/taline_data.py +++ b/experiment/meteo_france_data/scm_models_data/crocus/taline_data.py @@ -5,7 +5,7 @@ import pandas as pd from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusDepth from experiment.meteo_france_data.scm_models_data.scm_constants import ALTITUDES -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type massif_name = 'Queyras' study_class = CrocusDepth diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py index 4cfb5d70..30b3417d 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py +++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py @@ -7,7 +7,7 @@ import pandas as pd from 
experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ StudyVisualizer -from utils import cached_property, VERSION_TIME, get_display_name_from_object_type +from root_utils import cached_property, VERSION_TIME, get_display_name_from_object_type class AbstractHypercubeVisualizer(object): diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py index dc3fd797..d918abe9 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py +++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py @@ -12,7 +12,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat StudyVisualizer from experiment.trend_analysis.univariate_test.abstract_gev_trend_test import AbstractGevTrendTest from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type ALTITUDES_XLABEL = 'altitudes' diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py index 822bd604..086e668b 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py +++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py @@ -1,12 +1,7 @@ import time -from 
experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \ - AltitudeHypercubeVisualizer from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer_extended import \ - AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrendExtended, \ - AltitudeHypercubeVisualizerWithoutTrendExtended, QuantityHypercubeWithoutTrend -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.quantity_altitude_visualizer import \ - QuantityAltitudeHypercubeVisualizer + AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrend from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \ load_altitude_visualizer, load_quantity_visualizer from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py index 4ede274d..21311fc2 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py +++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py @@ -1,12 +1,7 @@ import time -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \ - AltitudeHypercubeVisualizer from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer_extended import \ - AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrendExtended, \ - AltitudeHypercubeVisualizerWithoutTrendExtended, QuantityHypercubeWithoutTrend -from 
experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.quantity_altitude_visualizer import \ - QuantityAltitudeHypercubeVisualizer + AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrend from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \ load_altitude_visualizer, load_quantity_visualizer from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py index e11a6d31..011477f7 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py +++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py @@ -9,7 +9,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visual load_altitude_visualizer from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ SCM_STUDIES, altitude_massif_name_and_study_class_for_poster, SCM_STUDIES_NAMES, SCM_STUDY_NAME_TO_SCM_STUDY -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type POSTER_ALTITUDES = [900, 1800, 2700] diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py index 608b4842..5b7fbcb9 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py +++ 
b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py @@ -9,7 +9,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat study_iterator_global from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ StudyVisualizer -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type def load_quantity_visualizer(quantity_hypercube_class, altitudes, last_starting_year, nb_data_reduced_for_speed, diff --git a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py index 92da9208..f553d1ac 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py +++ b/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py @@ -15,7 +15,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat StudyVisualizer from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest from experiment.meteo_france_data.scm_models_data.visualization.utils import plot_df -from utils import cached_property, get_display_name_from_object_type, VERSION_TIME +from root_utils import cached_property, get_display_name_from_object_type, VERSION_TIME class StudiesVisualizer(object): diff --git a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py index 6dcb3303..aeb8f63e 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py +++ 
b/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py @@ -16,7 +16,7 @@ from collections import OrderedDict from experiment.trend_analysis.univariate_test.gev_trend_test_one_parameter import GevLocationTrendTest from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.uniform_normalization import \ BetweenZeroAndOneNormalization, BetweenMinusOneAndOneNormalization -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type SCM_STUDIES = [SafranSnowfall, CrocusSweTotal, CrocusDepth, CrocusSwe3Days] SCM_STUDIES_NAMES = [get_display_name_from_object_type(k) for k in SCM_STUDIES] diff --git a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py index 7263aa3d..4b361e43 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py +++ b/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py @@ -10,6 +10,9 @@ import numpy as np import pandas as pd import seaborn as sns +from experiment.eurocode_data.eurocode_return_level_uncertainties import ExtractEurocodeReturnLevelFromExtremes, \ + EurocodeLevelUncertaintyFromExtremes +from experiment.eurocode_data.massif_name_to_departement import dep_class_to_massif_names from experiment.meteo_france_data.scm_models_data.abstract_extended_study import AbstractExtendedStudy from experiment.trend_analysis.abstract_score import MeanScore, AbstractTrendScore from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy @@ -17,13 +20,14 @@ from experiment.trend_analysis.univariate_test.abstract_univariate_test import A from experiment.trend_analysis.non_stationary_trends import \ ConditionalIndedendenceLocationTrendTest, MaxStableLocationTrendTest, 
IndependenceLocationTrendTest from experiment.meteo_france_data.scm_models_data.visualization.utils import create_adjusted_axes -from experiment.trend_analysis.univariate_test.utils import compute_gev_change_point_test_results +from experiment.trend_analysis.univariate_test.univariate_test_results import compute_gev_change_point_test_results from experiment.utils import average_smoothing_with_sliding_window from extreme_fit.distribution.abstract_params import AbstractParams from extreme_fit.estimator.full_estimator.abstract_full_estimator import \ FullEstimatorInASingleStepWithSmoothMargin from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator -from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import LinearNonStationaryLocationMarginModel, \ +from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import \ + LinearNonStationaryLocationMarginModel, \ LinearStationaryMarginModel from extreme_fit.model.margin_model.margin_function.abstract_margin_function import \ AbstractMarginFunction @@ -42,7 +46,7 @@ from spatio_temporal_dataset.coordinates.temporal_coordinates.generated_temporal from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset from spatio_temporal_dataset.spatio_temporal_observations.annual_maxima_observations import AnnualMaxima from test.test_utils import load_test_max_stable_models -from utils import get_display_name_from_object_type, VERSION_TIME, float_to_str_with_only_some_significant_digits, \ +from root_utils import get_display_name_from_object_type, VERSION_TIME, float_to_str_with_only_some_significant_digits, \ cached_property BLOCK_MAXIMA_DISPLAY_NAME = 'block maxima ' @@ -350,6 +354,36 @@ class StudyVisualizer(VisualizationParameters): start_year, stop_year = self.study.start_year_and_stop_year return list(range(start_year, stop_year)) + def massif_name_to_eurocode_level_uncertainty(self, model_class) -> Dict[str, 
EurocodeLevelUncertaintyFromExtremes]: + # todo: add multiprocessing + massif_name_to_eurocode_return_level_uncertainty = {} + for massif_id, massif_name in enumerate(self.study.study_massif_names): + print(massif_name) + years, smooth_maxima = self.smooth_maxima_x_y(massif_id) + res = EurocodeLevelUncertaintyFromExtremes.from_maxima_years_model_class(smooth_maxima, years, model_class) + massif_name_to_eurocode_return_level_uncertainty[massif_name] = res + return massif_name_to_eurocode_return_level_uncertainty + + def dep_class_to_eurocode_level_uncertainty(self, model_class): + """ Take the max with respect to the posterior mean for each departement """ + massif_name_to_eurocode_level_uncertainty = self.massif_name_to_eurocode_level_uncertainty(model_class) + print(massif_name_to_eurocode_level_uncertainty) + dep_class_to_eurocode_level_uncertainty = {} + for dep_class, massif_names in dep_class_to_massif_names.items(): + # Filter the couple of interest + couples = [(k, v) for k, v in massif_name_to_eurocode_level_uncertainty.items() if k in massif_names] + assert len(couples) > 0 + # Find the massif name with the maximum posterior mean + for c in couples: + print(c) + print(c[1].posterior_mean) + massif_name, eurocode_level_uncertainty = max(couples, key=lambda c: c[1].posterior_mean) + dep_class_to_eurocode_level_uncertainty[dep_class] = eurocode_level_uncertainty + return dep_class_to_eurocode_level_uncertainty + + + + @cached_property def massif_name_to_detailed_scores(self): """ @@ -430,7 +464,8 @@ class StudyVisualizer(VisualizationParameters): massif_names = [AbstractExtendedStudy.region_name_to_massif_names[r][0] for r in AbstractExtendedStudy.real_region_names] massif_names_for_sampling = list(set(self.study.study_massif_names) - set(massif_names)) - nb_massif_for_sampling = self.nb_massif_for_change_point_test - len(AbstractExtendedStudy.real_region_names) + nb_massif_for_sampling = self.nb_massif_for_change_point_test - len( + 
AbstractExtendedStudy.real_region_names) massif_names += sample(massif_names_for_sampling, k=nb_massif_for_sampling) else: massif_names = sample(self.study.study_massif_names, k=self.nb_massif_for_change_point_test) diff --git a/experiment/meteo_france_data/stations_data/comparison_analysis.py b/experiment/meteo_france_data/stations_data/comparison_analysis.py index c85b1cdc..ee516027 100644 --- a/experiment/meteo_france_data/stations_data/comparison_analysis.py +++ b/experiment/meteo_france_data/stations_data/comparison_analysis.py @@ -20,7 +20,7 @@ from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \ AbstractSpatioTemporalObservations from test.test_utils import load_test_max_stable_models -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type REANALYSE_STR = 'reanalyse' ALTITUDE_COLUMN_NAME = 'ALTITUDE' diff --git a/experiment/meteo_france_data/stations_data/main_spatial_comparison.py b/experiment/meteo_france_data/stations_data/main_spatial_comparison.py index 80374664..b2bdf90a 100644 --- a/experiment/meteo_france_data/stations_data/main_spatial_comparison.py +++ b/experiment/meteo_france_data/stations_data/main_spatial_comparison.py @@ -2,7 +2,7 @@ from experiment.meteo_france_data.stations_data.comparison_analysis import Compa from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import LinearAllParametersAllDimsMarginModel from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.uniform_normalization import \ BetweenZeroAndOneNormalization -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type def choice_of_altitude_and_nb_border_data_to_remove_to_get_data_without_nan(): diff --git 
a/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py b/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py index af0fd9b5..7be7297a 100644 --- a/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py +++ b/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py @@ -15,7 +15,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat from experiment.meteo_france_data.stations_data.comparison_analysis import ComparisonAnalysis, MASSIF_COLUMN_NAME, \ REANALYSE_STR, ALTITUDE_COLUMN_NAME, STATION_COLUMN_NAME from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest -from experiment.trend_analysis.univariate_test.utils import compute_gev_change_point_test_results +from experiment.trend_analysis.univariate_test.univariate_test_results import compute_gev_change_point_test_results from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates path = r'/home/erwan/Documents/projects/spatiotemporalextremes/experiment/meteo_france_data/stations_data/csv' diff --git a/experiment/robustness_plot/single_plot.py b/experiment/robustness_plot/single_plot.py index 70f68923..e2fe7133 100644 --- a/experiment/robustness_plot/single_plot.py +++ b/experiment/robustness_plot/single_plot.py @@ -7,7 +7,7 @@ from itertools import product from extreme_fit.estimator.abstract_estimator import AbstractEstimator from experiment.robustness_plot.display_item import DisplayItem -from utils import get_full_path +from root_utils import get_full_path plt.style.use('seaborn-white') diff --git a/experiment/simulation/abstract_simulation.py b/experiment/simulation/abstract_simulation.py index 9e7370c5..bc4e14e3 100644 --- a/experiment/simulation/abstract_simulation.py +++ 
b/experiment/simulation/abstract_simulation.py @@ -23,7 +23,7 @@ from extreme_fit.distribution.gev.gev_params import GevParams from spatio_temporal_dataset.dataset.abstract_dataset import get_subset_dataset from spatio_temporal_dataset.dataset.simulation_dataset import SimulatedDataset from spatio_temporal_dataset.slicer.split import split_to_display_kwargs -from utils import get_full_path, get_display_name_from_object_type +from root_utils import get_full_path, get_display_name_from_object_type SIMULATION_RELATIVE_PATH = op.join('local', 'simulation') diff --git a/experiment/trend_analysis/non_stationary_trends.py b/experiment/trend_analysis/non_stationary_trends.py index 3b6a362e..ab4debc4 100644 --- a/experiment/trend_analysis/non_stationary_trends.py +++ b/experiment/trend_analysis/non_stationary_trends.py @@ -17,7 +17,7 @@ from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_m from extreme_fit.model.margin_model.margin_function.linear_margin_function import LinearMarginFunction from extreme_fit.model.utils import OptimizationConstants from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type class AbstractNonStationaryTrendTest(object): diff --git a/experiment/trend_analysis/univariate_test/abstract_gev_trend_test.py b/experiment/trend_analysis/univariate_test/abstract_gev_trend_test.py index af9ae899..cbea66c2 100644 --- a/experiment/trend_analysis/univariate_test/abstract_gev_trend_test.py +++ b/experiment/trend_analysis/univariate_test/abstract_gev_trend_test.py @@ -3,49 +3,38 @@ import pandas as pd from cached_property import cached_property from scipy.stats import chi2 +from experiment.eurocode_data.utils import EUROCODE_QUANTILE from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest +from experiment.trend_analysis.univariate_test.utils import 
load_temporal_coordinates_and_dataset, \ + fitted_linear_margin_estimator from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator -from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import AbstractTemporalLinearMarginModel +from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \ + AbstractTemporalLinearMarginModel from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_models import \ StationaryStationModel from extreme_fit.model.utils import SafeRunException from extreme_fit.distribution.gev.gev_params import GevParams from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates -from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_coordinates import \ - AbstractTemporalCoordinates -from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.abstract_transformation import \ - CenteredScaledNormalization -from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset -from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \ - AbstractSpatioTemporalObservations class AbstractGevTrendTest(AbstractUnivariateTest): RRunTimeError_TREND = 'R RunTimeError trend' # I should use the quantile from the Eurocode for the buildings - quantile_for_strength = 0.98 + quantile_for_strength = EUROCODE_QUANTILE nb_years_for_quantile_evolution = 10 - def __init__(self, years, maxima, starting_year, unconstrained_model_class, constrained_model_class=StationaryStationModel, + def __init__(self, years, maxima, starting_year, unconstrained_model_class, + constrained_model_class=StationaryStationModel, fit_method=AbstractTemporalLinearMarginModel.ISMEV_GEV_FIT_METHOD_STR): super().__init__(years, maxima, starting_year) self.fit_method = fit_method # Load observations, coordinates and datasets - df = 
pd.DataFrame({AbstractCoordinates.COORDINATE_T: years}) - df_maxima_gev = pd.DataFrame(maxima, index=df.index) - observations = AbstractSpatioTemporalObservations(df_maxima_gev=df_maxima_gev) - self.coordinates = AbstractTemporalCoordinates.from_df(df, transformation_class=CenteredScaledNormalization) - self.dataset = AbstractDataset(observations=observations, coordinates=self.coordinates) - + self.coordinates, self.dataset = load_temporal_coordinates_and_dataset(maxima, years) try: # Fit constrained model - constrained_model = constrained_model_class(self.coordinates, starting_point=self.starting_year, fit_method=self.fit_method) - self.constrained_estimator = LinearMarginEstimator(self.dataset, constrained_model) - self.constrained_estimator.fit() + self.constrained_estimator = fitted_linear_margin_estimator(constrained_model_class, self.coordinates, self.dataset, self.starting_year, self.fit_method) # Fit unconstrained model - unconstrained_model = unconstrained_model_class(self.coordinates, starting_point=self.starting_year, fit_method=self.fit_method) - self.unconstrained_estimator = LinearMarginEstimator(self.dataset, unconstrained_model) - self.unconstrained_estimator.fit() + self.unconstrained_estimator = fitted_linear_margin_estimator(unconstrained_model_class, self.coordinates, self.dataset, self.starting_year, self.fit_method) self.crashed = False except SafeRunException: self.crashed = True @@ -127,7 +116,6 @@ class AbstractGevTrendTest(AbstractUnivariateTest): return self.constrained_estimator.margin_function_from_fit.get_gev_params(coordinate=np.array([1958]), is_transformed=False) - @property def test_trend_slope_strength(self): if self.crashed: @@ -136,7 +124,8 @@ class AbstractGevTrendTest(AbstractUnivariateTest): # Compute the slope strength slope = self._slope_strength() # Delta T must in the same unit as were the parameter of slope mu1 and sigma1 - slope *= self.nb_years_for_quantile_evolution * 
self.coordinates.transformed_distance_between_two_successive_years[0] + slope *= self.nb_years_for_quantile_evolution * \ + self.coordinates.transformed_distance_between_two_successive_years[0] return slope def _slope_strength(self): @@ -163,6 +152,3 @@ class AbstractGevTrendTest(AbstractUnivariateTest): return 0.0 else: return self.non_stationary_constant_gev_params.quantile(p=self.quantile_for_strength) - - - diff --git a/experiment/trend_analysis/univariate_test/univariate_test_results.py b/experiment/trend_analysis/univariate_test/univariate_test_results.py new file mode 100644 index 00000000..aeea3f63 --- /dev/null +++ b/experiment/trend_analysis/univariate_test/univariate_test_results.py @@ -0,0 +1,44 @@ +from multiprocessing.pool import Pool + +import numpy as np + +from experiment.trend_analysis.univariate_test.abstract_gev_trend_test import AbstractGevTrendTest +from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \ + AbstractTemporalLinearMarginModel +from root_utils import NB_CORES + + +def compute_gev_change_point_test_result(smooth_maxima, starting_year, trend_test_class, years, fit_method=AbstractTemporalLinearMarginModel.ISMEV_GEV_FIT_METHOD_STR): + trend_test = trend_test_class(years, smooth_maxima, starting_year) # type: AbstractGevTrendTest + assert isinstance(trend_test, AbstractGevTrendTest) + return trend_test.test_trend_type, \ + trend_test.test_trend_slope_strength, \ + trend_test.unconstained_nllh, \ + trend_test.test_trend_constant_quantile, \ + trend_test.mean_difference_same_sign_as_slope_strenght, \ + trend_test.variance_difference_same_sign_as_slope_strenght, \ + trend_test.unconstrained_model_deviance, \ + trend_test.constrained_model_deviance + + +def compute_gev_change_point_test_results(multiprocessing, maxima, starting_years, trend_test_class, + years): + if multiprocessing: + list_args = [(maxima, starting_year, trend_test_class, years) for starting_year in + starting_years] + with 
Pool(NB_CORES) as p: + trend_test_res = p.starmap(compute_gev_change_point_test_result, list_args) + else: + trend_test_res = [ + compute_gev_change_point_test_result(maxima, starting_year, trend_test_class, years) + for starting_year in starting_years] + # Keep only the most likely starting year + # (i.e. the starting year that minimizes its negative log likelihood) + # (set all the other data to np.nan so that they will not be taken into account in mean function) + best_idx = list(np.argmin(trend_test_res, axis=0))[2] + # print(best_idx, trend_test_res) + best_idxs = [best_idx] + # todo: by doing a sorting on the deviance, I could get the nb_top_likelihood_values values + # best_idxs = list(np.argmax(trend_test_res, axis=0))[-nb_top_likelihood_values:] + + return trend_test_res, best_idxs diff --git a/experiment/trend_analysis/univariate_test/utils.py b/experiment/trend_analysis/univariate_test/utils.py index 8c208ec2..75e4f7ff 100644 --- a/experiment/trend_analysis/univariate_test/utils.py +++ b/experiment/trend_analysis/univariate_test/utils.py @@ -1,44 +1,27 @@ -from multiprocessing.pool import Pool +import pandas as pd -import numpy as np +from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator +from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates +from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_coordinates import \ + AbstractTemporalCoordinates +from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.abstract_transformation import \ + CenteredScaledNormalization +from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset +from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \ + AbstractSpatioTemporalObservations -from experiment.trend_analysis.univariate_test.abstract_gev_trend_test import AbstractGevTrendTest -from 
extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \ - AbstractTemporalLinearMarginModel -from utils import NB_CORES +def load_temporal_coordinates_and_dataset(maxima, years): + df = pd.DataFrame({AbstractCoordinates.COORDINATE_T: years}) + df_maxima_gev = pd.DataFrame(maxima, index=df.index) + observations = AbstractSpatioTemporalObservations(df_maxima_gev=df_maxima_gev) + coordinates = AbstractTemporalCoordinates.from_df(df, transformation_class=CenteredScaledNormalization) + dataset = AbstractDataset(observations=observations, coordinates=coordinates) + return coordinates, dataset -def compute_gev_change_point_test_result(smooth_maxima, starting_year, trend_test_class, years, fit_method=AbstractTemporalLinearMarginModel.ISMEV_GEV_FIT_METHOD_STR): - trend_test = trend_test_class(years, smooth_maxima, starting_year) # type: AbstractGevTrendTest - assert isinstance(trend_test, AbstractGevTrendTest) - return trend_test.test_trend_type, \ - trend_test.test_trend_slope_strength, \ - trend_test.unconstained_nllh, \ - trend_test.test_trend_constant_quantile, \ - trend_test.mean_difference_same_sign_as_slope_strenght, \ - trend_test.variance_difference_same_sign_as_slope_strenght, \ - trend_test.unconstrained_model_deviance, \ - trend_test.constrained_model_deviance - -def compute_gev_change_point_test_results(multiprocessing, maxima, starting_years, trend_test_class, - years): - if multiprocessing: - list_args = [(maxima, starting_year, trend_test_class, years) for starting_year in - starting_years] - with Pool(NB_CORES) as p: - trend_test_res = p.starmap(compute_gev_change_point_test_result, list_args) - else: - trend_test_res = [ - compute_gev_change_point_test_result(maxima, starting_year, trend_test_class, years) - for starting_year in starting_years] - # Keep only the most likely starting year - # (i.e. 
the starting year that minimizes its negative log likelihood) - # (set all the other data to np.nan so that they will not be taken into account in mean function) - best_idx = list(np.argmin(trend_test_res, axis=0))[2] - # print(best_idx, trend_test_res) - best_idxs = [best_idx] - # todo: by doing a sorting on the deviance, I could get the nb_top_likelihood_values values - # best_idxs = list(np.argmax(trend_test_res, axis=0))[-nb_top_likelihood_values:] - - return trend_test_res, best_idxs +def fitted_linear_margin_estimator(model_class, coordinates, dataset, starting_year, fit_method): + model = model_class(coordinates, starting_point=starting_year, fit_method=fit_method) + estimator = LinearMarginEstimator(dataset, model) + estimator.fit() + return estimator diff --git a/extreme_fit/distribution/gev/main_fevd_fixed.R b/extreme_fit/distribution/gev/main_fevd_fixed.R index 6ecf87bb..6c9dca5b 100644 --- a/extreme_fit/distribution/gev/main_fevd_fixed.R +++ b/extreme_fit/distribution/gev/main_fevd_fixed.R @@ -50,6 +50,7 @@ m = res$results print(dim(m)) print(m[1,]) print(m[1,1]) +print(res$chain.info[1,]) diff --git a/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py b/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py index da993d39..cc8ae955 100644 --- a/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py +++ b/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py @@ -31,8 +31,8 @@ class LinearMarginEstimator(AbstractMarginEstimator): df_coordinates_temp = self.dataset.coordinates.df_temporal_coordinates_for_fit(split=self.train_split, starting_point=self.margin_model.starting_point) return self.margin_model.fitmargin_from_maxima_gev(data=maxima_gev_specialized, - df_coordinates_spat=df_coordinates_spat, - df_coordinates_temp=df_coordinates_temp) + df_coordinates_spat=df_coordinates_spat, + df_coordinates_temp=df_coordinates_temp) @cached_property def margin_function_from_fit(self) -> 
LinearMarginFunction: diff --git a/extreme_fit/estimator/utils.py b/extreme_fit/estimator/utils.py index 4f27e949..dec62318 100644 --- a/extreme_fit/estimator/utils.py +++ b/extreme_fit/estimator/utils.py @@ -3,8 +3,10 @@ from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model impo from extreme_fit.model.margin_model.margin_function.linear_margin_function import LinearMarginFunction -def load_margin_function(estimator: AbstractEstimator, margin_model: LinearMarginModel, margin_function_class=LinearMarginFunction): +def load_margin_function(estimator: AbstractEstimator, margin_model: LinearMarginModel, margin_function_class=LinearMarginFunction, coef_dict=None): + if coef_dict is None: + coef_dict = estimator.result_from_model_fit.margin_coef_dict return margin_function_class.from_coef_dict(coordinates=estimator.dataset.coordinates, gev_param_name_to_dims=margin_model.margin_function_start_fit.gev_param_name_to_dims, - coef_dict=estimator.result_from_model_fit.margin_coef_dict, + coef_dict=coef_dict, starting_point=margin_model.starting_point) diff --git a/extreme_fit/model/margin_model/linear_margin_model/abstract_temporal_linear_margin_model.py b/extreme_fit/model/margin_model/linear_margin_model/abstract_temporal_linear_margin_model.py index f67a5b85..1b96be1d 100644 --- a/extreme_fit/model/margin_model/linear_margin_model/abstract_temporal_linear_margin_model.py +++ b/extreme_fit/model/margin_model/linear_margin_model/abstract_temporal_linear_margin_model.py @@ -5,7 +5,7 @@ from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model impo from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import AbstractResultFromModelFit from extreme_fit.model.result_from_model_fit.result_from_extremes import ResultFromExtremes from extreme_fit.model.result_from_model_fit.result_from_ismev import ResultFromIsmev -from extreme_fit.model.utils import r, ro, get_null +from extreme_fit.model.utils import r, ro, get_null, 
get_margin_formula_extremes from extreme_fit.model.utils import safe_run_r_estimator from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates @@ -46,6 +46,7 @@ class AbstractTemporalLinearMarginModel(LinearMarginModel): # 9)), iter = 5000, verbose = TRUE, use.phi = FALSE) r_type_argument_kwargs = {'use.phi': False} + r_type_argument_kwargs.update(get_margin_formula_extremes(self.margin_function_start_fit.form_dict)) # location.fun = ~1, # scale.fun = ~1, shape.fun = ~1 # , priorParams = list(q=c(6), p=c(9)) diff --git a/extreme_fit/model/margin_model/margin_function/abstract_margin_function.py b/extreme_fit/model/margin_model/margin_function/abstract_margin_function.py index 9dd671bc..fb10aa47 100644 --- a/extreme_fit/model/margin_model/margin_function/abstract_margin_function.py +++ b/extreme_fit/model/margin_model/margin_function/abstract_margin_function.py @@ -9,7 +9,7 @@ from extreme_fit.distribution.gev.gev_params import GevParams from experiment.meteo_france_data.plot.create_shifted_cmap import imshow_shifted from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates from spatio_temporal_dataset.slicer.split import Split -from utils import cached_property +from root_utils import cached_property class AbstractMarginFunction(object): diff --git a/extreme_fit/model/margin_model/parametric_margin_model.py b/extreme_fit/model/margin_model/parametric_margin_model.py index 9a762b14..35ae8fa5 100644 --- a/extreme_fit/model/margin_model/parametric_margin_model.py +++ b/extreme_fit/model/margin_model/parametric_margin_model.py @@ -8,7 +8,7 @@ from extreme_fit.model.margin_model.margin_function.parametric_margin_function i from extreme_fit.model.result_from_model_fit.result_from_spatial_extreme import ResultFromSpatialExtreme from extreme_fit.model.margin_model.abstract_margin_model import AbstractMarginModel from extreme_fit.model.utils import safe_run_r_estimator, r, get_coord, \ - get_margin_formula + 
get_margin_formula_spatial_extreme from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates @@ -29,7 +29,7 @@ class ParametricMarginModel(AbstractMarginModel, ABC): assert data.shape[1] == len(df_coordinates_spat) # Margin formula for fitspatgev - fit_params = get_margin_formula(self.margin_function_start_fit.form_dict) + fit_params = get_margin_formula_spatial_extreme(self.margin_function_start_fit.form_dict) # Covariables covariables = get_coord(df_coordinates=df_coordinates_spat) diff --git a/extreme_fit/model/max_stable_model/abstract_max_stable_model.py b/extreme_fit/model/max_stable_model/abstract_max_stable_model.py index b2daca59..3d6a4f61 100644 --- a/extreme_fit/model/max_stable_model/abstract_max_stable_model.py +++ b/extreme_fit/model/max_stable_model/abstract_max_stable_model.py @@ -8,7 +8,7 @@ from rpy2.rinterface._rinterface import RRuntimeError from extreme_fit.model.abstract_model import AbstractModel from extreme_fit.model.result_from_model_fit.result_from_spatial_extreme import ResultFromSpatialExtreme from extreme_fit.model.utils import r, safe_run_r_estimator, get_coord, \ - get_margin_formula, SafeRunException + get_margin_formula_spatial_extreme, SafeRunException from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates @@ -54,7 +54,7 @@ class AbstractMaxStableModel(AbstractModel): start_dict = self.remove_unused_parameters(start_dict, fitmaxstab_with_one_dimensional_data) if fit_marge: start_dict.update(margin_start_dict) - margin_formulas = get_margin_formula(fit_marge_form_dict) + margin_formulas = get_margin_formula_spatial_extreme(fit_marge_form_dict) fit_params.update(margin_formulas) if fitmaxstab_with_one_dimensional_data: fit_params['iso'] = True diff --git a/extreme_fit/model/result_from_model_fit/result_from_extremes.py b/extreme_fit/model/result_from_model_fit/result_from_extremes.py index 7bb02b57..162fe628 100644 --- 
a/extreme_fit/model/result_from_model_fit/result_from_extremes.py +++ b/extreme_fit/model/result_from_model_fit/result_from_extremes.py @@ -1,17 +1,54 @@ +import numpy as np +import pandas as pd from rpy2 import robjects +from extreme_fit.distribution.gev.gev_params import GevParams from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import \ AbstractResultFromModelFit +from extreme_fit.model.result_from_model_fit.utils import get_margin_coef_dict class ResultFromExtremes(AbstractResultFromModelFit): - def __init__(self, result_from_fit: robjects.ListVector, gev_param_name_to_dim=None) -> None: + def __init__(self, result_from_fit: robjects.ListVector, gev_param_name_to_dim=None, + burn_in_percentage=0.1) -> None: super().__init__(result_from_fit) + self.burn_in_percentage = burn_in_percentage self.gev_param_name_to_dim = gev_param_name_to_dim - print(list(self.name_to_value.keys())) - # @property + @property + def results(self): + return np.array(self.name_to_value['results']) + + @property + def chain_info(self): + return np.array(self.name_to_value['chain.info']) + + @property + def df_posterior_samples(self) -> pd.DataFrame: + d = dict(zip(GevParams.PARAM_NAMES, self.results.transpose())) + d['loglik'] = self.chain_info[:, -2] + d['prior'] = self.chain_info[:, -1] + df_all_samples = pd.DataFrame(d) + # Remove the burn in period + burn_in_last_index = int(self.burn_in_percentage * len(df_all_samples)) + df_posterior_samples = df_all_samples.iloc[burn_in_last_index:, :] + return df_posterior_samples + + def get_coef_dict_from_posterior_sample(self, s: pd.Series): + assert len(s) >= 3 + values = {i: v for i, v in enumerate(s)} + return get_margin_coef_dict(self.gev_param_name_to_dim, values) + + @property + def margin_coef_dict(self): + """ It is the coef for the margin function corresponding to the mean posterior parameters """ + mean_posterior_parameters = self.df_posterior_samples.iloc[:, :-2].mean(axis=0) + return 
self.get_coef_dict_from_posterior_sample(mean_posterior_parameters) + + + +# @property # def # @property @@ -49,4 +86,3 @@ class ResultFromExtremes(AbstractResultFromModelFit): # @property # def convergence(self) -> str: # return convertFloatVector_to_float(self.name_to_value['conv']) == 0 - diff --git a/extreme_fit/model/result_from_model_fit/result_from_ismev.py b/extreme_fit/model/result_from_model_fit/result_from_ismev.py index e3691fc2..d83dacec 100644 --- a/extreme_fit/model/result_from_model_fit/result_from_ismev.py +++ b/extreme_fit/model/result_from_model_fit/result_from_ismev.py @@ -1,14 +1,13 @@ -from typing import Dict +from rpy2 import robjects -import numpy as np +from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import \ + AbstractResultFromModelFit +from extreme_fit.model.result_from_model_fit.utils import convertFloatVector_to_float, get_margin_coef_dict from rpy2 import robjects -from extreme_fit.model.margin_model.param_function.linear_coef import LinearCoef from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import \ AbstractResultFromModelFit from extreme_fit.model.result_from_model_fit.utils import convertFloatVector_to_float -from extreme_fit.distribution.gev.gev_params import GevParams -from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates class ResultFromIsmev(AbstractResultFromModelFit): @@ -19,23 +18,7 @@ class ResultFromIsmev(AbstractResultFromModelFit): @property def margin_coef_dict(self): - assert self.gev_param_name_to_dim is not None - # Build the Coeff dict from gev_param_name_to_dim - coef_dict = {} - i = 0 - mle_values = self.name_to_value['mle'] - for gev_param_name in GevParams.PARAM_NAMES: - # Add intercept - intercept_coef_name = LinearCoef.coef_template_str(gev_param_name, LinearCoef.INTERCEPT_NAME).format(1) - coef_dict[intercept_coef_name] = mle_values[i] - i += 1 - # Add a potential linear temporal trend - if gev_param_name in 
self.gev_param_name_to_dim: - temporal_coef_name = LinearCoef.coef_template_str(gev_param_name, - AbstractCoordinates.COORDINATE_T).format(1) - coef_dict[temporal_coef_name] = mle_values[i] - i += 1 - return coef_dict + return get_margin_coef_dict(self.gev_param_name_to_dim, self.name_to_value['mle']) @property def all_parameters(self): diff --git a/extreme_fit/model/result_from_model_fit/utils.py b/extreme_fit/model/result_from_model_fit/utils.py index ebd20e93..6d559b4f 100644 --- a/extreme_fit/model/result_from_model_fit/utils.py +++ b/extreme_fit/model/result_from_model_fit/utils.py @@ -1,5 +1,28 @@ import numpy as np +from extreme_fit.distribution.gev.gev_params import GevParams +from extreme_fit.model.margin_model.param_function.linear_coef import LinearCoef +from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates + def convertFloatVector_to_float(f): return np.array(f)[0] + + +def get_margin_coef_dict(gev_param_name_to_dim, mle_values): + assert gev_param_name_to_dim is not None + # Build the Coeff dict from gev_param_name_to_dim + coef_dict = {} + i = 0 + for gev_param_name in GevParams.PARAM_NAMES: + # Add intercept + intercept_coef_name = LinearCoef.coef_template_str(gev_param_name, LinearCoef.INTERCEPT_NAME).format(1) + coef_dict[intercept_coef_name] = mle_values[i] + i += 1 + # Add a potential linear temporal trend + if gev_param_name in gev_param_name_to_dim: + temporal_coef_name = LinearCoef.coef_template_str(gev_param_name, + AbstractCoordinates.COORDINATE_T).format(1) + coef_dict[temporal_coef_name] = mle_values[i] + i += 1 + return coef_dict diff --git a/extreme_fit/model/utils.py b/extreme_fit/model/utils.py index f7ffaf04..aec823bf 100644 --- a/extreme_fit/model/utils.py +++ b/extreme_fit/model/utils.py @@ -1,26 +1,23 @@ import io import os.path as op +import random import warnings from contextlib import redirect_stdout +from typing import Dict import numpy as np -import random -import sys -from types import 
TracebackType -from typing import Dict, Optional - import pandas as pd import rpy2.robjects as ro from rpy2 import robjects from rpy2.rinterface import RRuntimeWarning from rpy2.rinterface._rinterface import RRuntimeError - from rpy2.robjects import numpy2ri from rpy2.robjects import pandas2ri -from utils import get_root_path # Load R variables +from root_utils import get_root_path + r = ro.R() numpy2ri.activate() pandas2ri.activate() @@ -128,10 +125,27 @@ def get_null(): return as_null(1.0) -def get_margin_formula(fit_marge_form_dict) -> Dict: +# todo: move that to the result class maybe +def get_margin_formula_spatial_extreme(fit_marge_form_dict) -> Dict: margin_formula = {k: robjects.Formula(v) if v != 'NULL' else get_null() for k, v in fit_marge_form_dict.items()} return margin_formula + +def old_coef_name_to_new_coef_name(): + return { + 'temp.form.loc': 'location.fun', + 'temp.form.scale': 'scale.fun', + 'temp.form.shape': 'shape.fun', + } + + +def get_margin_formula_extremes(fit_marge_form_dict) -> Dict: + d = old_coef_name_to_new_coef_name() + form_dict = {d[k]: v if v != 'NULL' else ' ~1' for k, v in fit_marge_form_dict.items()} + return {k: robjects.Formula(v) for k, v in form_dict.items()} + + + # def conversion_to_FloatVector(data): # """Convert DataFrame or numpy array into FloatVector for r""" # if isinstance(data, pd.DataFrame): diff --git a/utils.py b/root_utils.py similarity index 100% rename from utils.py rename to root_utils.py diff --git a/spatio_temporal_dataset/coordinates/abstract_coordinates.py b/spatio_temporal_dataset/coordinates/abstract_coordinates.py index 290bbe31..16cc04ad 100644 --- a/spatio_temporal_dataset/coordinates/abstract_coordinates.py +++ b/spatio_temporal_dataset/coordinates/abstract_coordinates.py @@ -251,7 +251,9 @@ class AbstractCoordinates(object): # Compute the indices to modify ind_to_modify = df_temporal_coordinates.iloc[:, 0] <= starting_point # type: pd.Series # Assert that some coordinates are selected but not all 
- assert 0 < sum(ind_to_modify) < len(ind_to_modify), sum(ind_to_modify) + msg = '{} First year={} Last_year={}'.format(sum(ind_to_modify), df_temporal_coordinates.iloc[0, 0], + df_temporal_coordinates.iloc[-1, 0]) + assert 0 < sum(ind_to_modify) < len(ind_to_modify), msg # Modify the temporal coordinates to enforce the stationarity df_temporal_coordinates.loc[ind_to_modify] = starting_point # Load the temporal transformation object diff --git a/spatio_temporal_dataset/coordinates/spatial_coordinates/alps_station_3D_coordinates.py b/spatio_temporal_dataset/coordinates/spatial_coordinates/alps_station_3D_coordinates.py index 0f1df96d..8007bfd9 100644 --- a/spatio_temporal_dataset/coordinates/spatial_coordinates/alps_station_3D_coordinates.py +++ b/spatio_temporal_dataset/coordinates/spatial_coordinates/alps_station_3D_coordinates.py @@ -7,7 +7,7 @@ from spatio_temporal_dataset.coordinates.spatial_coordinates.abstract_spatial_co from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.tranformation_3D import \ AnisotropyTransformation from spatio_temporal_dataset.coordinates.transformed_coordinates.transformed_coordinates import TransformedCoordinates -from utils import get_full_path +from root_utils import get_full_path class AlpsStation3DCoordinates(AbstractSpatialCoordinates): diff --git a/spatio_temporal_dataset/spatio_temporal_observations/alps_precipitation_observations.py b/spatio_temporal_dataset/spatio_temporal_observations/alps_precipitation_observations.py index f69ed0b4..83ead954 100644 --- a/spatio_temporal_dataset/spatio_temporal_observations/alps_precipitation_observations.py +++ b/spatio_temporal_dataset/spatio_temporal_observations/alps_precipitation_observations.py @@ -2,7 +2,7 @@ import os.path as op from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \ AbstractSpatioTemporalObservations -from utils import get_full_path +from root_utils import get_full_path class 
AlpsPrecipitationObservations(AbstractSpatioTemporalObservations): diff --git a/test/test_experiment/test_SCM_study.py b/test/test_experiment/test_SCM_study.py index 8c0e1e67..7a0aa485 100644 --- a/test/test_experiment/test_SCM_study.py +++ b/test/test_experiment/test_SCM_study.py @@ -13,7 +13,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ StudyVisualizer from experiment.trend_analysis.univariate_test.gev_trend_test_one_parameter import GevLocationTrendTest -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type class TestSCMAllStudy(unittest.TestCase): diff --git a/test/test_experiment/test_coordinate_sensitivity.py b/test/test_experiment/test_coordinate_sensitivity.py index 081d5ce0..e709029a 100644 --- a/test/test_experiment/test_coordinate_sensitivity.py +++ b/test/test_experiment/test_coordinate_sensitivity.py @@ -9,7 +9,7 @@ from experiment.trend_analysis.non_stationary_trends import \ ConditionalIndedendenceLocationTrendTest from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.uniform_normalization import \ BetweenZeroAndOneNormalization, BetweenZeroAndOneNormalizationMinEpsilon, BetweenZeroAndOneNormalizationMaxEpsilon -from utils import get_display_name_from_object_type +from root_utils import get_display_name_from_object_type class TestCoordinateSensitivity(unittest.TestCase): diff --git a/test/test_experiment/test_region_eurocode.py b/test/test_experiment/test_region_eurocode.py index 97e7d3df..4aa64fb1 100644 --- a/test/test_experiment/test_region_eurocode.py +++ b/test/test_experiment/test_region_eurocode.py @@ -1,21 +1,24 @@ import unittest from collections import OrderedDict -from experiment.eurocode_data.eurocode_visualizer import display_region_limit -from experiment.eurocode_data.eurocode_region import C1, E, C2 +from 
experiment.eurocode_data.eurocode_return_level_uncertainties import EurocodeLevelUncertaintyFromExtremes +from experiment.eurocode_data.eurocode_visualizer import plot_model_name_to_dep_to_ordered_return_level_uncertainties +from experiment.eurocode_data.massif_name_to_departement import DEPARTEMENT_TYPES +from experiment.eurocode_data.utils import EUROCODE_ALTITUDES class TestCoordinateSensitivity(unittest.TestCase): DISPLAY = False - def test_region_eurocode(self): - altitudes = [900, 1200, 1500, 1800] - ordered_massif_name_to_quantiles = OrderedDict() - ordered_massif_name_to_quantiles['Vanoise'] = [1.2, 1.5, 1.7, 2.1] - ordered_massif_name_to_quantiles['Vercors'] = [0.7, 0.8, 1.1, 1.5] - display_region_limit(C1, altitudes, ordered_massif_name_to_quantiles, display=self.DISPLAY) - display_region_limit(C2, altitudes, ordered_massif_name_to_quantiles, display=self.DISPLAY) - display_region_limit(E, altitudes, ordered_massif_name_to_quantiles, display=self.DISPLAY) + def test_departement_eurocode_plot(self): + # Create an example + example = EurocodeLevelUncertaintyFromExtremes(posterior_mean=1.0, + poster_uncertainty_interval=(0.5, 1.25)) + dep_to_model_name_toreturn_level_uncertainty = { + dep: {"example": [example for _ in EUROCODE_ALTITUDES]} for dep in DEPARTEMENT_TYPES + } + plot_model_name_to_dep_to_ordered_return_level_uncertainties(dep_to_model_name_toreturn_level_uncertainty, + show=self.DISPLAY) if __name__ == '__main__': diff --git a/test/test_extreme_estimator/test_margin_fits/test_gev/test_gev_temporal_bayesian.py b/test/test_extreme_estimator/test_margin_fits/test_gev/test_gev_temporal_bayesian.py index 5f5ba22c..2b9aad9a 100644 --- a/test/test_extreme_estimator/test_margin_fits/test_gev/test_gev_temporal_bayesian.py +++ b/test/test_extreme_estimator/test_margin_fits/test_gev/test_gev_temporal_bayesian.py @@ -3,11 +3,14 @@ import unittest import numpy as np import pandas as pd +from experiment.trend_analysis.univariate_test.abstract_gev_trend_test 
import fitted_linear_margin_estimator +from extreme_fit.distribution.gev.gev_params import GevParams from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \ AbstractTemporalLinearMarginModel from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_models import StationaryStationModel, \ NonStationaryLocationStationModel +from extreme_fit.model.result_from_model_fit.result_from_extremes import ResultFromExtremes from extreme_fit.model.utils import r, set_seed_r from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_coordinates import \ @@ -18,74 +21,50 @@ from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_tempor from test.test_utils import load_non_stationary_temporal_margin_models -# class TestGevTemporalBayesian(unittest.TestCase): -# -# def setUp(self) -> None: -# set_seed_r() -# r(""" -# N <- 50 -# loc = 0; scale = 1; shape <- 1 -# x_gev <- rgev(N, loc = loc, scale = scale, shape = shape) -# start_loc = 0; start_scale = 1; start_shape = 1 -# """) -# # Compute the stationary temporal margin with isMev -# self.start_year = 0 -# df = pd.DataFrame({AbstractCoordinates.COORDINATE_T: range(self.start_year, self.start_year + 50)}) -# self.coordinates = AbstractTemporalCoordinates.from_df(df) -# df2 = pd.DataFrame(data=np.array(r['x_gev']), index=df.index) -# observations = AbstractSpatioTemporalObservations(df_maxima_gev=df2) -# self.dataset = AbstractDataset(observations=observations, coordinates=self.coordinates) -# self.fit_method = AbstractTemporalLinearMarginModel.EXTREMES_FEVD_BAYESIAN_FIT_METHOD_STR -# -# def test_gev_temporal_margin_fit_stationary(self): -# # Create estimator -# margin_model = StationaryStationModel(self.coordinates, fit_method=self.fit_method) 
-# estimator = LinearMarginEstimator(self.dataset, margin_model) -# estimator.fit() -# ref = {'loc': 0.0219, 'scale': 1.0347, 'shape': 0.8295} -# for year in range(1, 3): -# mle_params_estimated = estimator.margin_function_fitted.get_gev_params(np.array([year])).to_dict() -# for key in ref.keys(): -# self.assertAlmostEqual(ref[key], mle_params_estimated[key], places=3) +class TestGevTemporalBayesian(unittest.TestCase): - # def test_gev_temporal_margin_fit_nonstationary(self): - # # Create estimator - # margin_models = load_non_stationary_temporal_margin_models(self.coordinates) - # for margin_model in margin_models: - # # margin_model = NonStationaryLocationStationModel(self.coordinates) - # estimator = LinearMarginEstimator(self.dataset, margin_model) - # estimator.fit() - # # Checks that parameters returned are indeed different - # mle_params_estimated_year1 = estimator.margin_function_fitted.get_gev_params(np.array([1])).to_dict() - # mle_params_estimated_year3 = estimator.margin_function_fitted.get_gev_params(np.array([3])).to_dict() - # self.assertNotEqual(mle_params_estimated_year1, mle_params_estimated_year3) - # - # def test_gev_temporal_margin_fit_nonstationary_with_start_point(self): + def setUp(self) -> None: + set_seed_r() + r(""" + N <- 50 + loc = 0; scale = 1; shape <- 1 + x_gev <- rgev(N, loc = loc, scale = scale, shape = shape) + start_loc = 0; start_scale = 1; start_shape = 1 + """) + # Compute the stationary temporal margin with isMev + self.start_year = 0 + df = pd.DataFrame({AbstractCoordinates.COORDINATE_T: range(self.start_year, self.start_year + 50)}) + self.coordinates = AbstractTemporalCoordinates.from_df(df) + df2 = pd.DataFrame(data=np.array(r['x_gev']), index=df.index) + observations = AbstractSpatioTemporalObservations(df_maxima_gev=df2) + self.dataset = AbstractDataset(observations=observations, coordinates=self.coordinates) + self.fit_method = AbstractTemporalLinearMarginModel.EXTREMES_FEVD_BAYESIAN_FIT_METHOD_STR + + def 
test_gev_temporal_margin_fit_stationary(self): + # Create estimator + estimator_fitted = fitted_linear_margin_estimator(StationaryStationModel, self.coordinates, self.dataset, + starting_year=0, + fit_method=AbstractTemporalLinearMarginModel.EXTREMES_FEVD_BAYESIAN_FIT_METHOD_STR) + ref = {'loc': 0.082261, 'scale': 1.183703, 'shape': 0.882750} + for year in range(1, 3): + mle_params_estimated = estimator_fitted.margin_function_from_fit.get_gev_params(np.array([year])).to_dict() + for key in ref.keys(): + self.assertAlmostEqual(ref[key], mle_params_estimated[key], places=3) + + # def test_gev_temporal_margin_fit_non_stationary(self): # # Create estimator - # estimator = self.fit_non_stationary_estimator(starting_point=3) - # self.assertNotEqual(estimator.margin_function_fitted.mu1_temporal_trend, 0.0) - # # Checks starting point parameter are well passed - # self.assertEqual(3, estimator.margin_function_fitted.starting_point) - # # Checks that parameters returned are indeed different - # mle_params_estimated_year1 = estimator.margin_function_fitted.get_gev_params(np.array([1])).to_dict() - # mle_params_estimated_year3 = estimator.margin_function_fitted.get_gev_params(np.array([3])).to_dict() - # self.assertEqual(mle_params_estimated_year1, mle_params_estimated_year3) - # mle_params_estimated_year5 = estimator.margin_function_fitted.get_gev_params(np.array([5])).to_dict() - # self.assertNotEqual(mle_params_estimated_year5, mle_params_estimated_year3) - # - # def fit_non_stationary_estimator(self, starting_point): - # margin_model = NonStationaryLocationStationModel(self.coordinates, starting_point=starting_point + self.start_year) - # estimator = LinearMarginEstimator(self.dataset, margin_model) - # estimator.fit() - # return estimator - # - # def test_two_different_starting_points(self): - # # Create two different estimators - # estimator1 = self.fit_non_stationary_estimator(starting_point=3) - # estimator2 = self.fit_non_stationary_estimator(starting_point=28) - # 
mu1_estimator1 = estimator1.margin_function_fitted.mu1_temporal_trend - # mu1_estimator2 = estimator2.margin_function_fitted.mu1_temporal_trend - # self.assertNotEqual(mu1_estimator1, mu1_estimator2) + # estimator_fitted = fitted_linear_margin_estimator(NonStationaryLocationStationModel, self.coordinates, self.dataset, + # starting_year=0, + # fit_method=AbstractTemporalLinearMarginModel.EXTREMES_FEVD_BAYESIAN_FIT_METHOD_STR) + # result_from_model_fit = estimator_fitted.result_from_model_fit # type: ResultFromExtremes + # print(result_from_model_fit.posterior_samples) + + # print(estimator.result_from_model_fit.r) + # ref = {'loc': 0.0219, 'scale': 1.0347, 'shape': 0.8295} + # for year in range(1, 3): + # mle_params_estimated = estimator.margin_function_from_fit.get_gev_params(np.array([year])).to_dict() + # for key in ref.keys(): + # self.assertAlmostEqual(ref[key], mle_params_estimated[key], places=3) if __name__ == '__main__': -- GitLab