diff --git a/experiment/meteo_france_data/plot/mask_poly.py b/experiment/meteo_france_data/plot/mask_poly.py
deleted file mode 100644
index d7a478680b54176a7444f73bae5455140eb196ae..0000000000000000000000000000000000000000
--- a/experiment/meteo_france_data/plot/mask_poly.py
+++ /dev/null
@@ -1,44 +0,0 @@
-import matplotlib.pyplot as plt
-import matplotlib.patches as mpatches
-import matplotlib.path as mpath
-
-
-# from: https://stackoverflow.com/questions/3320311/fill-outside-of-polygon-mask-array-where-indicies-are-beyond-a-circular-bounda
-def mask_outside_polygon(poly_verts, ax=None):
-    """
-    Plots a mask on the specified axis ("ax", defaults to plt.gca()) such that
-    all areas outside of the polygon specified by "poly_verts" are masked.
-
-    "poly_verts" must be a list of tuples of the verticies in the polygon in
-    counter-clockwise order.
-
-    Returns the matplotlib.patches.PathPatch instance plotted on the figure.
-    """
-
-    if ax is None:
-        ax = plt.gca()
-
-    # Get current plot limits
-    xlim = ax.get_xlim()
-    ylim = ax.get_ylim()
-
-    # Verticies of the plot boundaries in clockwise order
-    bound_verts = [(xlim[0], ylim[0]), (xlim[0], ylim[1]),
-                   (xlim[1], ylim[1]), (xlim[1], ylim[0]),
-                   (xlim[0], ylim[0])]
-
-    # A series of codes (1 and 2) to tell matplotlib whether to draw a line or
-    # move the "pen" (So that there's no connecting line)
-    bound_codes = [mpath.Path.MOVETO] + (len(bound_verts) - 1) * [mpath.Path.LINETO]
-    poly_codes = [mpath.Path.MOVETO] + (len(poly_verts) - 1) * [mpath.Path.LINETO]
-
-    # Plot the masking patch
-    path = mpath.Path(bound_verts + poly_verts, bound_codes + poly_codes)
-    patch = mpatches.PathPatch(path, facecolor='white', edgecolor='none')
-    patch = ax.add_patch(patch)
-
-    # Reset the plot limits to their original extents
-    ax.set_xlim(xlim)
-    ax.set_ylim(ylim)
-
-    return patch
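For reference, the helper removed above masks everything outside a massif contour by building one compound Path: the axes boundary is traced clockwise and the polygon counter-clockwise, so the two outlines wind in opposite directions and the white patch is filled with a hole left over the polygon. A minimal self-contained sketch of the same trick (the axis limits and triangle vertices are made up for illustration; nothing here comes from the repository):

```python
import matplotlib.pyplot as plt
import matplotlib.patches as mpatches
import matplotlib.path as mpath

fig, ax = plt.subplots()
ax.set_xlim(0, 10)
ax.set_ylim(0, 10)

# Polygon to keep visible, listed counter-clockwise (hypothetical coordinates)
poly_verts = [(2, 2), (8, 2), (5, 8), (2, 2)]
# Axes boundary, listed clockwise
x0, x1 = ax.get_xlim()
y0, y1 = ax.get_ylim()
bound_verts = [(x0, y0), (x0, y1), (x1, y1), (x1, y0), (x0, y0)]

# One MOVETO per sub-path, LINETO for the remaining vertices, so the two
# outlines are not joined by a stray connecting segment
def path_codes(verts):
    return [mpath.Path.MOVETO] + [mpath.Path.LINETO] * (len(verts) - 1)

path = mpath.Path(bound_verts + poly_verts, path_codes(bound_verts) + path_codes(poly_verts))
ax.add_patch(mpatches.PathPatch(path, facecolor='white', edgecolor='none'))
plt.show()
```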
diff --git a/experiment/meteo_france_data/plot/shifted_color_map.py b/experiment/meteo_france_data/plot/shifted_color_map.py
deleted file mode 100644
index dd4ac7d627b5eba0ef982e271a1780955c8f5533..0000000000000000000000000000000000000000
--- a/experiment/meteo_france_data/plot/shifted_color_map.py
+++ /dev/null
@@ -1,56 +0,0 @@
-import numpy as np
-import matplotlib
-import matplotlib.pyplot as plt
-from mpl_toolkits.axes_grid1 import AxesGrid
-
-# from: https://stackoverflow.com/questions/7404116/defining-the-midpoint-of-a-colormap-in-matplotlib/20528097
-def shiftedColorMap(cmap, start=0, midpoint=0.5, stop=1.0, name='shiftedcmap'):
-    '''
-    Function to offset the "center" of a colormap. Useful for
-    data with a negative min and positive max and you want the
-    middle of the colormap's dynamic range to be at zero.
-
-    Input
-    -----
-    cmap : The matplotlib colormap to be altered
-    start : Offset from lowest point in the colormap's range.
-        Defaults to 0.0 (no lower offset). Should be between
-        0.0 and `midpoint`.
-    midpoint : The new center of the colormap. Defaults to
-        0.5 (no shift). Should be between 0.0 and 1.0. In
-        general, this should be 1 - vmax / (vmax + abs(vmin))
-        For example if your data range from -15.0 to +5.0 and
-        you want the center of the colormap at 0.0, `midpoint`
-        should be set to 1 - 5/(5 + 15)) or 0.75
-    stop : Offset from highest point in the colormap's range.
-        Defaults to 1.0 (no upper offset). Should be between
-        `midpoint` and 1.0.
-    '''
-    cdict = {
-        'red': [],
-        'green': [],
-        'blue': [],
-        'alpha': []
-    }
-
-    # regular index to compute the colors
-    reg_index = np.linspace(start, stop, 257)
-
-    # shifted index to match the data
-    shift_index = np.hstack([
-        np.linspace(0.0, midpoint, 128, endpoint=False),
-        np.linspace(midpoint, 1.0, 129, endpoint=True)
-    ])
-
-    for ri, si in zip(reg_index, shift_index):
-        r, g, b, a = cmap(ri)
-
-        cdict['red'].append((si, r, r))
-        cdict['green'].append((si, g, g))
-        cdict['blue'].append((si, b, b))
-        cdict['alpha'].append((si, a, a))
-
-    newcmap = matplotlib.colors.LinearSegmentedColormap(name, cdict)
-    plt.register_cmap(cmap=newcmap)
-
-    return newcmap
diff --git a/experiment/meteo_france_data/scm_models_data/abstract_study.py b/experiment/meteo_france_data/scm_models_data/abstract_study.py
index 23a7a7ec934799fefdc352bec6b7e81cdf94660e..b60f72a660299fc636d843842d08669b9e7b9f09 100644
--- a/experiment/meteo_france_data/scm_models_data/abstract_study.py
+++ b/experiment/meteo_france_data/scm_models_data/abstract_study.py
@@ -25,7 +25,7 @@ from experiment.meteo_france_data.scm_models_data.scm_constants import ALTITUDES
 from experiment.meteo_france_data.scm_models_data.visualization.utils import get_km_formatter
 from extreme_fit.function.margin_function.abstract_margin_function import \
     AbstractMarginFunction
-from experiment.meteo_france_data.plot.create_shifted_cmap import create_colorbase_axis, \
+from experiment.meteo_france_data.scm_models_data.visualization.create_shifted_cmap import create_colorbase_axis, \
     get_shifted_map, get_colors
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 from spatio_temporal_dataset.coordinates.spatial_coordinates.abstract_spatial_coordinates import \
@@ -369,11 +369,7 @@ class AbstractStudy(object):
         for coordinate_id, coords_list in cls.idx_to_coords_list.items():
             # Retrieve the list of coords (x,y) that define the contour of the massif of id coordinate_id
-
-            # if j == 0:
-            #     mask_outside_polygon(poly_verts=l, ax=ax)
             # Plot the contour of the massif
-
             coords_list = list(zip(*coords_list))
             ax.plot(*coords_list, color='black')
diff --git a/experiment/meteo_france_data/scm_models_data/cumulated_study.py b/experiment/meteo_france_data/scm_models_data/safran/cumulated_study.py
similarity index 100%
rename from experiment/meteo_france_data/scm_models_data/cumulated_study.py
rename to experiment/meteo_france_data/scm_models_data/safran/cumulated_study.py
diff --git a/experiment/meteo_france_data/scm_models_data/safran/safran.py b/experiment/meteo_france_data/scm_models_data/safran/safran.py
index a19b8207edaa24f9a603febf0e323cd395aacded..f2fe8f75f0505aa895fb0e517656dd469ab66989 100644
--- a/experiment/meteo_france_data/scm_models_data/safran/safran.py
+++ b/experiment/meteo_france_data/scm_models_data/safran/safran.py
@@ -3,7 +3,7 @@ import numpy as np
 from experiment.meteo_france_data.scm_models_data.abstract_extended_study import AbstractExtendedStudy
 from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy
 from experiment.meteo_france_data.scm_models_data.abstract_variable import AbstractVariable
-from experiment.meteo_france_data.scm_models_data.cumulated_study import CumulatedStudy
+from experiment.meteo_france_data.scm_models_data.safran.cumulated_study import CumulatedStudy
 from experiment.meteo_france_data.scm_models_data.safran.safran_variable import SafranSnowfallVariable, \
     SafranRainfallVariable, SafranTemperatureVariable, SafranTotalPrecipVariable
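The next diff moves `shiftedColorMap` from the deleted `plot/shifted_color_map.py` above into `visualization/create_shifted_cmap.py`. As context for the midpoint formula documented in its docstring, here is a brief usage sketch; it is illustrative only (the random data, the `coolwarm` base colormap, and the plotting calls are assumptions, not repository code), and it assumes the function is importable from its new location:

```python
import numpy as np
import matplotlib.pyplot as plt

from experiment.meteo_france_data.scm_models_data.visualization.create_shifted_cmap import shiftedColorMap

# Fake data with an asymmetric range, roughly [-15, 5]
data = np.random.uniform(-15.0, 5.0, size=(20, 20))
vmin, vmax = data.min(), data.max()

# midpoint = 1 - vmax / (vmax + abs(vmin)); for the exact range [-15, 5] this is
# 1 - 5 / (5 + 15) = 0.75, which pins the colormap's center to the value 0
midpoint = 1 - vmax / (vmax + abs(vmin))
shifted = shiftedColorMap(plt.cm.coolwarm, midpoint=midpoint, name='shifted_coolwarm')

plt.imshow(data, cmap=shifted, vmin=vmin, vmax=vmax, origin='lower')
plt.colorbar()
plt.show()
```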
diff --git a/experiment/meteo_france_data/plot/create_shifted_cmap.py b/experiment/meteo_france_data/scm_models_data/visualization/create_shifted_cmap.py
similarity index 58%
rename from experiment/meteo_france_data/plot/create_shifted_cmap.py
rename to experiment/meteo_france_data/scm_models_data/visualization/create_shifted_cmap.py
index 7f6fc9706d61b705c20c0833ddf5a1c723667ea2..9534f0fcaa92bfa419bc6bf530881fc0f83b719f 100644
--- a/experiment/meteo_france_data/plot/create_shifted_cmap.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/create_shifted_cmap.py
@@ -1,3 +1,4 @@
+import matplotlib
 import matplotlib as mpl
 import matplotlib.cm as cm
 import matplotlib.colorbar as cbar
@@ -5,7 +6,6 @@ import matplotlib.pyplot as plt
 import numpy as np
 from mpl_toolkits.axes_grid1 import make_axes_locatable
 
-from experiment.meteo_france_data.plot.shifted_color_map import shiftedColorMap
 from extreme_fit.distribution.abstract_params import AbstractParams
 
 
@@ -66,3 +66,57 @@ def imshow_shifted(ax, gev_param_name, values, visualization_extend, mask_2D=Non
     masked_array[-1, -1] = value - epsilon
     # IMPORTANT: Origin for all the plots is at the bottom left corner
     ax.imshow(masked_array, extent=visualization_extend, cmap=shifted_cmap, origin='lower')
+
+
+
+# from: https://stackoverflow.com/questions/7404116/defining-the-midpoint-of-a-colormap-in-matplotlib/20528097
+def shiftedColorMap(cmap, start=0, midpoint=0.5, stop=1.0, name='shiftedcmap'):
+    '''
+    Function to offset the "center" of a colormap. Useful for
+    data with a negative min and a positive max, when you want the
+    middle of the colormap's dynamic range to be at zero.
+
+    Input
+    -----
+    cmap : The matplotlib colormap to be altered
+    start : Offset from the lowest point in the colormap's range.
+        Defaults to 0.0 (no lower offset). Should be between
+        0.0 and `midpoint`.
+    midpoint : The new center of the colormap. Defaults to
+        0.5 (no shift). Should be between 0.0 and 1.0. In
+        general, this should be 1 - vmax / (vmax + abs(vmin)).
+        For example, if your data range from -15.0 to +5.0 and
+        you want the center of the colormap at 0.0, `midpoint`
+        should be set to 1 - 5/(5 + 15), i.e. 0.75.
+    stop : Offset from the highest point in the colormap's range.
+        Defaults to 1.0 (no upper offset). Should be between
+        `midpoint` and 1.0.
+    '''
+    cdict = {
+        'red': [],
+        'green': [],
+        'blue': [],
+        'alpha': []
+    }
+
+    # regular index to compute the colors
+    reg_index = np.linspace(start, stop, 257)
+
+    # shifted index to match the data
+    shift_index = np.hstack([
+        np.linspace(0.0, midpoint, 128, endpoint=False),
+        np.linspace(midpoint, 1.0, 129, endpoint=True)
+    ])
+
+    for ri, si in zip(reg_index, shift_index):
+        r, g, b, a = cmap(ri)
+
+        cdict['red'].append((si, r, r))
+        cdict['green'].append((si, g, g))
+        cdict['blue'].append((si, b, b))
+        cdict['alpha'].append((si, a, a))
+
+    newcmap = matplotlib.colors.LinearSegmentedColormap(name, cdict)
+    plt.register_cmap(cmap=newcmap)
+
+    return newcmap
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/__init__.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py
deleted file mode 100644
index 30b3417d1ea334048bac50cb326bea55f8f832e3..0000000000000000000000000000000000000000
--- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py
+++ /dev/null
@@ -1,144 +0,0 @@
-import os
-import os.path as op
-from typing import Dict, Tuple
-
-import matplotlib.pyplot as plt
-import pandas as pd
-
-from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \
-    StudyVisualizer
-from root_utils import cached_property, VERSION_TIME, get_display_name_from_object_type
-
-
-class AbstractHypercubeVisualizer(object):
-    """
-    A study visualizer contain some massifs and years. This forms the base DataFrame of the hypercube
-    Additional index will come from the tuple.
- Tuple could contain altitudes, type of snow quantity - """ - - def __init__(self, tuple_to_study_visualizer: Dict[Tuple, StudyVisualizer], - trend_test_class, - nb_data_reduced_for_speed=False, - reduce_strength_array=False, - save_to_file=False, - first_starting_year=None, - last_starting_year=None, - exact_starting_year=None, - verbose=True, - sigma_for_best_year=0.0): - assert sigma_for_best_year >= 0.0 - self.sigma_for_best_year = sigma_for_best_year - self.reduce_strength_array = reduce_strength_array - self.verbose = verbose - self.save_to_file = save_to_file - self.trend_test_class = trend_test_class - self.tuple_to_study_visualizer = tuple_to_study_visualizer # type: Dict[Tuple, StudyVisualizer] - - if isinstance(nb_data_reduced_for_speed, bool): - self.nb_data_for_fast_mode = 7 if nb_data_reduced_for_speed else None - else: - assert isinstance(nb_data_reduced_for_speed, int) - self.nb_data_for_fast_mode = nb_data_reduced_for_speed - - if exact_starting_year is not None: - assert first_starting_year is None and last_starting_year is None - self.first_starting_year, self.last_starting_year = exact_starting_year, exact_starting_year - else: - default_first_starting_year, *_, default_last_starting_year = self.all_potential_starting_years - self.first_starting_year = first_starting_year if first_starting_year is not None else default_first_starting_year - self.last_starting_year = last_starting_year if last_starting_year is not None else default_last_starting_year - # Load starting year - self.starting_years = [year for year in self.all_potential_starting_years - if self.first_starting_year <= year <= self.last_starting_year] - if self.nb_data_for_fast_mode is not None: - self.starting_years = self.starting_years[:self.nb_data_for_fast_mode] - self.last_starting_year = self.starting_years[-1] - - if self.verbose: - print('Hypercube with parameters:') - print('First starting year: {}, Last starting year: {}'.format(self.first_starting_year, - self.last_starting_year)) - print('Starting years:', self.starting_years) - print('Trend test class:', get_display_name_from_object_type(self.trend_test_class)) - - # Main attributes defining the hypercube - - @property - def trend_test_name(self): - return get_display_name_from_object_type(self.trend_test_class) - - @property - def all_potential_starting_years(self): - return self.study_visualizer.starting_years - - def tuple_values(self, idx): - return sorted(set([t[idx] if isinstance(t, tuple) else t for t in self.tuple_to_study_visualizer.keys()])) - - @cached_property - def df_trends_spatio_temporal(self): - return [study_visualizer.df_trend_spatio_temporal(self.trend_test_class, self.starting_years, - self.nb_data_for_fast_mode) - for study_visualizer in self.tuple_to_study_visualizer.values()] - - def _df_hypercube_trend_meta(self, idx) -> pd.DataFrame: - df_spatio_temporal_trend_strength = [e[idx] for e in self.df_trends_spatio_temporal] - return pd.concat(df_spatio_temporal_trend_strength, keys=list(self.tuple_to_study_visualizer.keys()), axis=0) - - @cached_property - def df_hypercube_trend_type(self) -> pd.DataFrame: - return self._df_hypercube_trend_meta(idx=0 - ) - - @cached_property - def df_hypercube_trend_slope_relative_strength(self) -> pd.DataFrame: - return self._df_hypercube_trend_meta(idx=1) - - @cached_property - def df_hypercube_trend_nllh(self) -> pd.DataFrame: - return self._df_hypercube_trend_meta(idx=2) - - @cached_property - def df_hypercube_trend_constant_quantile(self) -> pd.DataFrame: - return 
self._df_hypercube_trend_meta(idx=3) - - @cached_property - def df_hypercube_trend_mean_same_sign(self) -> pd.DataFrame: - return self._df_hypercube_trend_meta(idx=4) - - @cached_property - def df_hypercube_trend_variance_same_sign(self) -> pd.DataFrame: - return self._df_hypercube_trend_meta(idx=5) - - # Some properties - - @property - def study_title(self): - return self.study.title - - def show_or_save_to_file(self, specific_title='', tight=False, dpi=None): - if self.save_to_file: - main_title, *_ = '_'.join(self.study_title.split()).split('/') - filename = "{}/{}/".format(VERSION_TIME, main_title) - filename += specific_title - filepath = op.join(self.study.result_full_path, filename + '.png') - dirname = op.dirname(filepath) - if not op.exists(dirname): - os.makedirs(dirname, exist_ok=True) - if tight: - plt.savefig(filepath, bbox_inches='tight', pad_inches=+0.03, dpi=1000) - elif dpi is not None: - plt.savefig(filepath, dpi=dpi) - else: - plt.savefig(filepath) - else: - plt.show() - plt.close() - - @property - def study_visualizer(self) -> StudyVisualizer: - return list(self.tuple_to_study_visualizer.values())[0] - - @property - def study(self): - return self.study_visualizer.study diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py deleted file mode 100644 index 52c60ce4e55feafb5f4023f0ab8eadddccb2a46d..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py +++ /dev/null @@ -1,528 +0,0 @@ -import numpy as np -import pandas as pd -import matplotlib.pyplot as plt -from matplotlib.ticker import ScalarFormatter -from scipy.ndimage import gaussian_filter - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.abstract_hypercube_visualizer import \ - AbstractHypercubeVisualizer -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - SCM_STUDY_NAME_TO_COLOR, SCM_STUDY_NAME_TO_ABBREVIATION, SCM_STUDY_CLASS_TO_ABBREVIATION -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ - StudyVisualizer -from experiment.trend_analysis.univariate_test.extreme_trend_test.abstract_gev_trend_test import AbstractGevTrendTest -from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest -from root_utils import get_display_name_from_object_type - -ALTITUDES_XLABEL = 'altitudes' - -STARTING_YEARS_XLABEL = 'starting years' - -from math import log10, floor - - -def round_sig(x, sig=2): - return round(x, sig - int(floor(log10(abs(x)))) - 1) - - -def make_patch_spines_invisible(ax): - ax.set_frame_on(True) - ax.patch.set_visible(False) - for sp in ax.spines.values(): - sp.set_visible(False) - - -class AltitudeHypercubeVisualizer(AbstractHypercubeVisualizer): - - @property - def altitudes(self): - return self.tuple_values(idx=0) - - @property - def display_trend_type_to_style(self): - return self.trend_test_class.display_trend_type_to_style() - - @property - def display_trend_types(self): - return self.display_trend_type_to_style.keys() - - @property - def nb_rows(self): - return 1 - - def ylabel_to_series(self, reduction_function, isin_parameters=None): - return {} - - def trend_type_to_series(self, reduction_function, 
isin_parameters=None): - # Map each trend type to its serie with percentages - # Define here all the trend type we might need in the results/displays - return {trend_type: self.trend_type_reduction_series(reduction_function=reduction_function, - df_bool=self.df_bool(trend_type, isin_parameters).copy()) - for trend_type in self.trend_types_to_process} - - @property - def trend_types_to_process(self): - return list(self.display_trend_types) + [AbstractUnivariateTest.SIGNIFICATIVE_ALL_TREND] - - def df_bool(self, display_trend_type, isin_parameters=None): - return self.df_hypercube_trend_type.isin(AbstractUnivariateTest.get_real_trend_types(display_trend_type)) - - def trend_type_reduction_series(self, reduction_function, df_bool): - # Reduce df_bool df to a serie s_trend_type_percentage - s_trend_type_percentage = reduction_function(df_bool) - assert isinstance(s_trend_type_percentage, pd.Series) - assert not isinstance(s_trend_type_percentage.index, pd.MultiIndex) - s_trend_type_percentage *= 100 - series = [s_trend_type_percentage] - if self.reduce_strength_array: - # Reduce df_strength to a serie s_trend_strength - df_strength = self.df_hypercube_trend_slope_relative_strength[df_bool] - s_trend_strength = reduction_function(df_strength) - df_constant = self.df_hypercube_trend_constant_quantile[df_bool] - s_trend_constant = reduction_function(df_constant) - series.extend([s_trend_strength, s_trend_constant]) - # Add the mean and the variance anyway - s_trend_mean_sign = reduction_function(self.df_hypercube_trend_mean_same_sign[df_bool]) - s_trend_variance_sign = reduction_function(self.df_hypercube_trend_variance_same_sign[df_bool]) - series.extend([s_trend_mean_sign, s_trend_variance_sign]) - return series - - def subtitle_to_reduction_function(self, reduction_function, level=None, add_detailed_plot=False, subtitle=None): - def reduction_function_with_level(df_bool, **kwargs): - return reduction_function(df_bool, **kwargs) if level is None else reduction_function(df_bool, level, - **kwargs) - - if subtitle is None: - # subtitle = self.study.variable_name[:6] - subtitle = SCM_STUDY_CLASS_TO_ABBREVIATION[type(self.study)] - # Ensure that subtitle does not belong to this dictionary so that the plot will be normal - assert subtitle not in SCM_STUDY_NAME_TO_COLOR - - return {subtitle: reduction_function_with_level} - - def get_title_plot(self, xlabel, ax_idx=None): - labels = ['altitudes', 'starting years', 'massifs'] - assert xlabel in labels, xlabel - if ax_idx == 1: - return '% of change per year for the parameter value' - elif ax_idx == 0: - return '% of trend type' - else: - labels.remove(xlabel) - if xlabel != 'starting years': - labels.remove('starting years') - common_txt = 'averaged on {}'.format(' & '.join(labels)) - common_txt += ' with any starting year between {} and {}'.format(self.first_starting_year, - self.last_starting_year) - return common_txt - - def visualize_trend_test_evolution(self, reduction_function, xlabel, xlabel_values, axes=None, marker='o', - subtitle='', isin_parameters=None, - plot_title=None, idx_reduction=None, - poster_plot=False): - - # Plot in one graph several graph that correspond to the same trend_type - trend_type_to_series = self.trend_type_to_series(reduction_function, isin_parameters) - end_idx = len(list(trend_type_to_series.values())[0]) - axes_for_trend_type = axes[:end_idx] - for ax_idx, ax in enumerate(axes_for_trend_type): - for display_trend_type in self.display_trend_types: - style = self.display_trend_type_to_style[display_trend_type] - 
values = trend_type_to_series[display_trend_type][ax_idx] - xlabel_values = list(values.index) - values = list(values.values) - ax.plot(xlabel_values, values, style + marker, label=display_trend_type) - - if ax_idx == 0: - # Global information - ax.set_ylabel(self.get_title_plot(xlabel, ax_idx=0)) - if xlabel != STARTING_YEARS_XLABEL: - ax.set_yticks(list(range(0, 101, 20))) - else: - ax.set_ylabel(self.get_title_plot(xlabel, ax_idx=ax_idx)) - - # Common function functions - if xlabel == STARTING_YEARS_XLABEL: - ax.set_xticks(xlabel_values[::3]) - else: - ax.set_xticks(xlabel_values) - ax.set_xlabel(xlabel) - ax.grid() - ax.legend() - if plot_title is not None: - ax.set_title(plot_title) - - # Plot other graphs where there is a single line that do not correspond to trend types - axes_remaining = axes[end_idx:] - ylabel_to_series = self.ylabel_to_series(reduction_function, isin_parameters) - assert len(axes_remaining) == len(ylabel_to_series), '{}, {}'.format(len(axes_remaining), len(ylabel_to_series)) - best_year = np.nan - for ax_idx, (ax, (ylabel, serie)) in enumerate(zip(axes_remaining, ylabel_to_series.items())): - assert isinstance(serie, pd.Series) - xlabel_values = list(serie.index) - values = list(serie.values) - smooth_values = gaussian_filter(values, self.sigma_for_best_year) - argmax_idx = np.argmax(smooth_values) - best_year = xlabel_values[argmax_idx] - if plot_title is not None: - plot_title += ' (max reached in {})'.format(best_year) - - if subtitle in SCM_STUDY_NAME_TO_COLOR: - ax_reversed, color = ax.twinx(), SCM_STUDY_NAME_TO_COLOR[subtitle] - ylabel = 'mean logLik for ' + SCM_STUDY_NAME_TO_ABBREVIATION[subtitle] - ax.plot([], [], label=ylabel, color=color) - linewidth = 10 if poster_plot else None - ax_reversed.plot(xlabel_values, values, label=ylabel, color=color, linewidth=linewidth) - if self.sigma_for_best_year > 0: - ax_reversed.plot(xlabel_values, smooth_values, label=ylabel + ' smooth', color=color, linestyle=':', - linewidth=linewidth) - fontsize = 30 if poster_plot else None - ax_reversed.set_ylabel(ylabel, color=color, fontsize=fontsize, labelpad=-20) - ax_reversed.axvline(x=best_year, color=color, linestyle='--', linewidth=linewidth) - - # Offset the right spine of par2. The ticks and label have already been - # placed on the right by twinx above. - position = 1 + idx_reduction * 0.08 - if idx_reduction > 0: - ax_reversed.spines["right"].set_position(("axes", position)) - # Having been created by twinx, par2 has its frame off, so the line of its - # detached spine is invisible. First, activate the frame but make the patch - # and spines invisible. - make_patch_spines_invisible(ax_reversed) - # Second, show the right spine. 
- ax_reversed.spines["right"].set_visible(True) - if poster_plot: - # ax_reversed.yaxis.set_major_formatter(FormatStrFormatter('%.1f')) - ax_reversed.yaxis.set_major_formatter(ScalarFormatter(useMathText=True)) - # ax_reversed.tick_params(axis='both', which='major', labelsize=15) - ax_reversed.tick_params(axis='y', which='major', labelsize=25) - # ax.tick_params(axis='x', which='major', labelsize=20) - ax.tick_params(axis='x', which='major', labelsize=25) - - ax_reversed.yaxis.set_ticks([np.round(min(values), 1), np.round(max(values), 1)]) - else: - ax.set_title(plot_title) - # ax.legend() - # Common things to all the graph - if not poster_plot: - ax.set_xlabel(xlabel) - plt.setp(ax.get_yticklabels(), visible=False) - - specific_title = self.specific_title_trend_evolution(subtitle, xlabel, loglik_title=len(axes_remaining) > 0) - - # Figure title - # specific_title += '\n' - # - # trend_types = [AbstractUnivariateTest.ALL_TREND, - # AbstractUnivariateTest.SIGNIFICATIVE_ALL_TREND, - # AbstractUnivariateTest.SIGNIFICATIVE_POSITIVE_TREND, - # AbstractUnivariateTest.SIGNIFICATIVE_NEGATIVE_TREND] - # series = [trend_type_to_series[trend_type][0] for trend_type in trend_types] - # percents = [serie.sum() if xlabel == STARTING_YEARS_XLABEL else serie.mean() for serie in series] - # percents = [np.round(p) for p in percents] - # specific_title += 'Total ' if xlabel == STARTING_YEARS_XLABEL else 'Mean ' - # specific_title += 'all trend {}, all significative trends: {} (+:{} -{})'.format(*percents) - if not poster_plot: - plt.suptitle(specific_title) - - return specific_title, best_year - - def specific_title_trend_evolution(self, subtitle, xlabel, loglik_title=False): - if loglik_title: - specific_title = 'Mean LogLik of the non stationary model' - else: - specific_title = 'Evolution of {} trends'.format(subtitle) - specific_title += ' wrt to the {}'.format(xlabel) - if len(self.altitudes) == 1: - specific_title += ' at altitude={}m'.format(self.altitudes[0]) - return specific_title - - def load_trend_test_evolution_axes(self, nb_rows): - fig, axes = plt.subplots(nb_rows, 1, figsize=self.study_visualizer.figsize, constrained_layout=True) - if not isinstance(axes, np.ndarray): - axes = [axes] - return axes - - def load_trend_test_evolution_axes_with_columns(self, nb_rows, nb_columns): - fig, axes = plt.subplots(nb_rows, nb_columns, figsize=self.study_visualizer.figsize, constrained_layout=True) - if not isinstance(axes, np.ndarray): - axes = [axes] - else: - axes = axes.reshape((nb_rows * nb_columns)) - return axes - - def visualize_trend_test_repartition(self, reduction_function, axes=None, subtitle='', isin_parameters=None, - plot_title=None): - - for i, axes_row in enumerate(axes): - trend_type_to_serie = {k: v[i].replace(0.0, np.nan) for k, v in - self.trend_type_to_series(reduction_function, isin_parameters).items()} - vmax = max([s.max() for s in trend_type_to_serie.values()]) - vmin = min([s.min() for s in trend_type_to_serie.values()]) - vmax = max(vmax, 0.01) - if vmin == vmax: - epislon = 0.001 * vmax - vmin -= epislon - vmax += epislon - - if i == 0: - vmin, vmax = 0, 100 - for ax, display_trend_type in zip(axes_row, self.display_trend_types): - serie = trend_type_to_serie[display_trend_type] - massif_to_value = dict(serie) - cmap = self.trend_test_class.get_cmap_from_trend_type(display_trend_type) - self.study.visualize_study(ax, massif_to_value, show=False, cmap=cmap, label=display_trend_type, - vmax=vmax, vmin=vmin) - if plot_title is not None: - ax.set_title(plot_title) - row_title = 
self.get_title_plot(xlabel='massifs', ax_idx=i) - StudyVisualizer.clean_axes_write_title_on_the_left(axes_row, row_title, left_border=None) - - # Global information - title = 'Repartition of {} trends (significative or not) with {}'.format(subtitle, self.trend_test_name) - title += '\n ' + self.get_title_plot('massifs') - plt.suptitle(title) - - return title - - def visualize_trend_test_repartition_poster(self, reduction_function, axes=None, subtitle='', isin_parameters=None, - plot_title=None, - poster_plot=False, - write_text_on_massif=True, - display_trend_color=True): - trend_type_to_serie = {k: v[0].replace(0.0, np.nan) for k, v in - self.trend_type_to_series(reduction_function, isin_parameters).items()} - - massif_to_color = {} - add_text = self.nb_rows > 1 - massif_to_year = {} - massif_to_strength = {} - massif_to_constant = {} - massif_to_mean_difference_same_sign = {} - massif_to_variance_difference_same_sign = {} - poster_trend_types = [AbstractUnivariateTest.SIGNIFICATIVE_POSITIVE_TREND, - AbstractUnivariateTest.SIGNIFICATIVE_NEGATIVE_TREND, - AbstractUnivariateTest.NEGATIVE_TREND, - AbstractUnivariateTest.POSITIVE_TREND, - ][:] - for display_trend_type, style in self.display_trend_type_to_style.items(): - if display_trend_type in poster_trend_types: - color = style[:-1] - serie = trend_type_to_serie[display_trend_type] - massif_to_color_for_trend_type = {k: color for k, v in dict(serie).items() if not np.isnan(v)} - massif_to_color.update(massif_to_color_for_trend_type) - if add_text: - if self.reduce_strength_array: - massif_to_value_for_trend_type = [{k: v for k, v in - self.trend_type_to_series(reduction_function, - isin_parameters)[ - display_trend_type][i].items() - if k in massif_to_color_for_trend_type} for i in [1, 2]] - massif_to_strength.update(massif_to_value_for_trend_type[0]) - massif_to_constant.update(massif_to_value_for_trend_type[1]) - mean_idx, variance_idx = 3, 4 - else: - mean_idx, variance_idx = 1, 2 - - massif_to_value_for_trend_type = {k: int(v) for k, v in - self.trend_type_to_series(reduction_function, - isin_parameters)[ - display_trend_type][3].items() - if k in massif_to_color_for_trend_type} - massif_to_year.update(massif_to_value_for_trend_type) - - # Add the mean and variance sign anyway - massif_to_value_for_trend_type = [{k: v for k, v in - self.trend_type_to_series(reduction_function, - isin_parameters)[ - display_trend_type][i].items() - if k in massif_to_color_for_trend_type} for i in - [mean_idx, variance_idx]] - massif_to_mean_difference_same_sign.update(massif_to_value_for_trend_type[0]) - massif_to_variance_difference_same_sign.update(massif_to_value_for_trend_type[1]) - # Compute massif to hatch boolean - massif_name_to_hatch_boolean_list = { - massif: [massif_to_mean_difference_same_sign[massif] == 1.0, - massif_to_variance_difference_same_sign[massif] == 1.0] - for massif in massif_to_color.keys() - } - - # Compute massif_to_value - if self.reduce_strength_array: - massif_name_to_value = {m: "{} {}{}".format( - int(massif_to_constant[m]), - "+" if massif_to_strength[m] > 0 else "", - round(massif_to_strength[m], 1), - AbstractGevTrendTest.nb_years_for_quantile_evolution) - for m in massif_to_strength} - else: - massif_name_to_value = {k: "$t_0=$" + str(int(v)) if display_trend_color else v for k, v in massif_to_year.items()} - - title = self.set_trend_test_reparition_title(subtitle, set=not poster_plot, first_title=display_trend_color) - - if display_trend_color: - self.study.visualize_study(None, massif_name_to_color=massif_to_color, 
show=False, - show_label=False, scaled=True, add_text=write_text_on_massif, - massif_name_to_value=massif_name_to_value, - fontsize=4, - axis_off=True, - massif_name_to_hatch_boolean_list=massif_name_to_hatch_boolean_list, - ) - else: - VMIN = 1957 - VMAX = 1998 - assert VMIN < self.first_starting_year - assert VMAX > self.last_starting_year - self.study.visualize_study(None, show=False, - show_label=False, scaled=True, add_text=False, - massif_name_to_value=massif_name_to_value, - cmap=plt.cm.GnBu, - add_colorbar=True, - vmin=VMIN, - vmax=VMAX) - - - - - return title - - def set_trend_test_reparition_title(self, subtitle, set=True, first_title=True): - # Global information - title = 'Repartition of {} trends'.format(subtitle) - if self.study.has_orientation: - title += ' orientation {} degrees'.format(int(self.study.orientation)) - title += ' at altitude={}m \nfor the starting_year={}'.format(self.altitudes[0], self.first_starting_year) - if len(self.starting_years) > 1: - title += ' until starting_year={}'.format(self.last_starting_year) - title += ' with {} test'.format(get_display_name_from_object_type(self.trend_test_class)) - if first_title: - title += '\nEvolution of the Eurocode quantile every {} years'.format( - AbstractGevTrendTest.nb_years_for_quantile_evolution) - else: - title += '\nStarting years' - if set: - plt.suptitle(title) - return title - - def load_axes_for_trend_test_repartition(self, nb_rows, nb_columns=None): - if nb_columns is None: - nb_columns = len(self.display_trend_type_to_style) - fig, axes = plt.subplots(nb_rows, nb_columns, figsize=self.study_visualizer.figsize) - if isinstance(axes, np.ndarray): - axes = axes.reshape((nb_rows, nb_columns)) - return axes - - @property - def altitude_index_level(self): - return 0 - - @property - def massif_index_level(self): - return 1 - - @staticmethod - def year_reduction(df, **kwargs): - # Take the mean with respect to all the first axis indices - return df.mean(axis=0) - - def visualize_year_trend_test(self, axes=None, marker='o', add_detailed_plots=False, plot_title=None, - isin_parameters=None, - show_or_save_to_file=True, - subtitle_specified=None, - poster_plot=False): - if axes is None: - axes = self.load_trend_test_evolution_axes(self.nb_rows) - else: - assert len(axes) == self.nb_rows - - results = [] - for idx_reduction, (subtitle, reduction_function) in enumerate( - self.subtitle_to_reduction_function(self.year_reduction, - add_detailed_plot=add_detailed_plots, - subtitle=subtitle_specified).items()): - specific_title, best_year = self.visualize_trend_test_evolution( - reduction_function=reduction_function, - xlabel=STARTING_YEARS_XLABEL, - xlabel_values=self.starting_years, axes=axes, - marker=marker, - subtitle=subtitle, - isin_parameters=isin_parameters, - plot_title=plot_title, - idx_reduction=idx_reduction, - poster_plot=poster_plot - ) - results.append((specific_title, best_year, subtitle)) - if show_or_save_to_file: - last_specific_title = results[-1][0] - self.show_or_save_to_file(specific_title=last_specific_title, - ) - return results - - @staticmethod - def index_reduction(df, level): - # Take the sum with respect to the years, replace any missing data with np.nan - df = df.any(axis=1) - # Take the mean with respect to the level of interest - return df.mean(level=level) - - def visualize_altitude_trend_test(self, axes=None, marker='o', add_detailed_plots=False, plot_title=None, - isin_parameters=None, - show_or_save_to_file=True): - if axes is None: - axes = 
self.load_trend_test_evolution_axes(self.nb_rows) - else: - assert len(axes) == self.nb_rows - - last_title = '' - for subtitle, reduction_function in self.subtitle_to_reduction_function(self.index_reduction, - level=self.altitude_index_level, - add_detailed_plot=add_detailed_plots).items(): - last_title = self.visualize_trend_test_evolution(reduction_function=reduction_function, - xlabel=ALTITUDES_XLABEL, - xlabel_values=self.altitudes, axes=axes, marker=marker, - subtitle=subtitle, isin_parameters=isin_parameters, - plot_title=plot_title) - if show_or_save_to_file: - self.show_or_save_to_file(specific_title=last_title) - return last_title - - def visualize_massif_trend_test(self, axes=None, add_detailed_plots=False, plot_title=None, - isin_parameters=None, - show_or_save_to_file=True): - if axes is None: - axes = self.load_axes_for_trend_test_repartition(self.nb_rows) - else: - assert len(axes) == self.nb_rows - - last_title = '' - for subtitle, reduction_function in self.subtitle_to_reduction_function(self.index_reduction, - level=self.massif_index_level, - add_detailed_plot=add_detailed_plots).items(): - last_title = self.visualize_trend_test_repartition(reduction_function, axes, subtitle=subtitle, - isin_parameters=isin_parameters, - plot_title=plot_title) - if show_or_save_to_file: - self.show_or_save_to_file(specific_title=last_title) - - return last_title - - def visualize_massif_trend_test_one_altitude(self, axes=None, add_detailed_plots=False, plot_title=None, - isin_parameters=None, - show_or_save_to_file=True, - poster_plot=False, - write_text_on_massif=True, - display_trend_color=True): - last_title = '' - for subtitle, reduction_function in self.subtitle_to_reduction_function(self.index_reduction, - level=self.massif_index_level, - add_detailed_plot=add_detailed_plots).items(): - last_title = self.visualize_trend_test_repartition_poster(reduction_function, axes, subtitle=subtitle, - isin_parameters=isin_parameters, - plot_title=plot_title, - poster_plot=poster_plot, - write_text_on_massif=write_text_on_massif, - display_trend_color=display_trend_color) - if show_or_save_to_file: - self.show_or_save_to_file(specific_title=last_title, dpi=1000, tight=poster_plot) - - return last_title diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer_extended.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer_extended.py deleted file mode 100644 index 3e1607a4e43bd4e7b68cbd527704213ef6858c6f..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer_extended.py +++ /dev/null @@ -1,148 +0,0 @@ -from experiment.meteo_france_data.scm_models_data.abstract_extended_study import AbstractExtendedStudy -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.abstract_hypercube_visualizer import \ - AbstractHypercubeVisualizer -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \ - AltitudeHypercubeVisualizer -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_year_hypercube_visualizer import \ - Altitude_Hypercube_Year_Visualizer, AltitudeHypercubeVisualizerBis, AltitudeHypercubeVisualizerWithoutTrendType -from 
experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.quantity_altitude_visualizer import \ - QuantityAltitudeHypercubeVisualizer - - -class AltitudeHypercubeVisualizerExtended(AltitudeHypercubeVisualizer): - - def df_bool(self, display_trend_type, isin_parameters=None): - df_bool = super().df_bool(display_trend_type) - # Slice a part of the array - df_bool = self.isin_slicing(df_bool, isin_parameters) - return df_bool - - def isin_slicing(self, df, isin_parameters): - if isin_parameters is not None: - assert isinstance(isin_parameters, list) - for isin_parameter in isin_parameters: - transpose, values, level = isin_parameter - if transpose: - df = df.transpose() - ind = df.index.isin(values=values, level=level) - res = df.loc[ind].copy() - df = res.transpose() if transpose else res - return df - - def _visualize_meta(self, visualization_function, loading_function, name_to_isin_parameters=None, - multiplication_factor_column=None, add_detailed_plot=False): - assert name_to_isin_parameters is not None, 'this method should not be called directly' - - if multiplication_factor_column is None: - multiplication_factor_row = len(name_to_isin_parameters) - all_axes = loading_function(self.nb_rows * multiplication_factor_row) - multiplication_factor = multiplication_factor_row - else: - multiplication_factor_row = len(name_to_isin_parameters) // multiplication_factor_column - multiplication_factor = multiplication_factor_row * multiplication_factor_column - all_axes = loading_function(self.nb_rows * multiplication_factor_row, multiplication_factor_column) - specific_title = '' - for j, (name, isin_parameters) in enumerate(name_to_isin_parameters.items()): - axes = all_axes[j::multiplication_factor] - specific_title = visualization_function(axes, plot_title=name, - isin_parameters=isin_parameters, - show_or_save_to_file=False, - add_detailed_plots=add_detailed_plot) - self.show_or_save_to_file(specific_title=specific_title) - - # Altitude trends - - def _visualize_altitude_trend_test(self, name_to_isin_parameters=None): - return self._visualize_meta(visualization_function=self.visualize_altitude_trend_test, - loading_function=self.load_trend_test_evolution_axes, - name_to_isin_parameters=name_to_isin_parameters) - - def visualize_altitute_trend_test_by_regions(self): - return self._visualize_altitude_trend_test(name_to_isin_parameters=self.region_name_to_isin_parameters) - - @property - def region_name_to_isin_parameters(self): - return {region_name: [(False, values, self.massif_index_level)] - for region_name, values in AbstractExtendedStudy.region_name_to_massif_names.items()} - - # Massif trends - - def _visualize_massif_trend_test(self, name_to_isin_parameters=None): - return self._visualize_meta(visualization_function=self.visualize_massif_trend_test, - loading_function=self.load_axes_for_trend_test_repartition, - name_to_isin_parameters=name_to_isin_parameters) - - def visualize_massif_trend_test_by_altitudes(self): - return self._visualize_massif_trend_test(name_to_isin_parameters=self.altitude_band_name_to_isin_parameters) - - @property - def altitude_band_name_to_values(self): - return { - '900m <= alti <= 3000m': self.altitudes, - '900m <= alti <= 1800m': [900, 1200, 1500, 1800], - '2100m <= alti <= 3000m': [2100, 2400, 2700, 3000], - } - - # altitude_band = 1000 - # group_idxs = [a // altitude_band for a in self.altitudes] - # altitude_band_name_to_values = {'All altitudes': self.altitudes} - # for group_idx in set(group_idxs): - # values = [a for a, i in 
zip(self.altitudes, group_idxs) if i == group_idx] - # altitude_band_name = '{}m <= altitude <={}m'.format(group_idx * altitude_band, - # (group_idx + 1) * altitude_band) - # altitude_band_name_to_values[altitude_band_name] = values - # return altitude_band_name_to_values - - @property - def altitude_band_name_to_isin_parameters(self): - return {altitude_band_name: [(False, values, self.altitude_index_level)] - for altitude_band_name, values in self.altitude_band_name_to_values.items()} - - # Year trends - - @property - def massif_name_and_altitude_band_name_to_isin_parameters(self): - d = {} - for massif_name, isin_parameters1 in self.region_name_to_isin_parameters.items(): - for altitude_band_name, isin_parameters2 in self.altitude_band_name_to_isin_parameters.items(): - name = massif_name + ' ' + altitude_band_name - isin_parameters = isin_parameters1 + isin_parameters2 - d[name] = isin_parameters - return d - - def vsualize_year_trend_by_regions_and_altitudes(self, add_detailed_plot=False): - return self._visualize_meta(visualization_function=self.visualize_year_trend_test, - loading_function=self.load_trend_test_evolution_axes_with_columns, - name_to_isin_parameters=self.massif_name_and_altitude_band_name_to_isin_parameters, - multiplication_factor_column=len(self.altitude_band_name_to_isin_parameters), - add_detailed_plot=add_detailed_plot) - - -class AltitudeHypercubeVisualizerWithoutTrendExtended(AltitudeHypercubeVisualizerExtended, - AltitudeHypercubeVisualizerWithoutTrendType): - - def df_loglikelihood(self, isin_parameters=None): - df = super().df_loglikelihood() - df = self.isin_slicing(df=df, isin_parameters=isin_parameters) - return df - - -# Extension - -class AltitudeHypercubeVisualizerBisExtended(AltitudeHypercubeVisualizerExtended, AltitudeHypercubeVisualizerBis): - pass - - -class AltitudeYearHypercubeVisualizerExtended(AltitudeHypercubeVisualizerExtended, Altitude_Hypercube_Year_Visualizer): - pass - - -# Quantity hypercube - -class QuantityHypercubeWithoutTrend(AltitudeHypercubeVisualizerWithoutTrendType, QuantityAltitudeHypercubeVisualizer): - pass - - -class QuantityHypercubeWithoutTrendExtended(AltitudeHypercubeVisualizerWithoutTrendExtended, - QuantityHypercubeWithoutTrend): - pass diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_year_hypercube_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_year_hypercube_visualizer.py deleted file mode 100644 index 8b0ed5161c57826f40c923127f1ce067e717710d..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_year_hypercube_visualizer.py +++ /dev/null @@ -1,52 +0,0 @@ -import numpy as np - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \ - AltitudeHypercubeVisualizer - - -class AltitudeHypercubeVisualizerWithoutTrendType(AltitudeHypercubeVisualizer): - - def trend_type_to_series(self, reduction_function, isin_parameters=None): - return {trend_type: [] for trend_type in self.trend_types_to_process} - - def df_loglikelihood(self, isin_parameters=None): - return -self.df_hypercube_trend_nllh - - def ylabel_to_series(self, reduction_function, isin_parameters=None): - return {'mean LogLik': reduction_function(self.df_loglikelihood(isin_parameters=isin_parameters))} - - -class AltitudeHypercubeVisualizerBis(AltitudeHypercubeVisualizer): - - @staticmethod - def 
index_reduction(df, level, **kwargs): - replace_zero_with_nan = kwargs.get('year_visualization') is not None - # Take the sum with respect to the years, replace any missing data with np.nan - if replace_zero_with_nan: - df = df.sum(axis=1).replace(0.0, np.nan) - else: - df = df.sum(axis=1) - # Take the mean with respect to the level of interest - return df.mean(level=level) - - -class Altitude_Hypercube_Year_Visualizer(AltitudeHypercubeVisualizerBis): - - def get_title_plot(self, xlabel, ax_idx=None): - if ax_idx == self.nb_rows - 1: - return 'mean starting year' - return super().get_title_plot(xlabel, ax_idx) - - @property - def nb_rows(self): - return super().nb_rows + 1 - - def trend_type_reduction_series(self, reduction_function, df_bool): - series = super().trend_type_reduction_series(reduction_function, df_bool) - # Create df argmax - df = df_bool.copy() - df = (df * df.columns)[df_bool] - # Reduce and append - serie = reduction_function(df, year_visualization=True) - series.append(serie) - return series diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/__init__.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_one_altitudes.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_one_altitudes.py deleted file mode 100644 index bd461db47012ab2142b0147e853e20e732674e6f..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_one_altitudes.py +++ /dev/null @@ -1,65 +0,0 @@ -import time - -from experiment.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer_extended import \ - AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrend -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \ - load_altitude_visualizer, load_quantity_visualizer -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - SCM_STUDIES -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter import GevLocationTrendTest - - -def get_fast_parameters(altitude=1800): - save_to_file = False - only_first_one = False - nb_data_reduced_for_speed = 4 - altitudes = [altitude] - last_starting_year = None - trend_test_class = GevLocationTrendTest - return altitudes, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class - - -def get_fast_altitude_visualizer(altitude_hypercube_class, altitude=1800, study_class=SafranSnowfall, exact_year=None): - altitudes, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class = get_fast_parameters(altitude=altitude) - visualizer = load_altitude_visualizer(altitude_hypercube_class, altitudes, last_starting_year, - nb_data_reduced_for_speed, only_first_one, save_to_file, [study_class], - trend_test_class, exact_starting_year=exact_year) - return visualizer - - -def 
get_fast_quantity_visualizer(quantity_hypercube_class, altitude=1800, study_classes=None): - altitudes, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class = get_fast_parameters(altitude=altitude) - if study_classes is None: - study_classes = SCM_STUDIES[:2] - visualizer = load_quantity_visualizer(quantity_hypercube_class, altitudes, last_starting_year, nb_data_reduced_for_speed, only_first_one, - save_to_file, study_classes, trend_test_class) - return visualizer - - -def main_mean_log_likelihood_poster_1(): - # Simply the main graph - res = get_fast_quantity_visualizer(QuantityHypercubeWithoutTrend).visualize_year_trend_test(add_detailed_plots=True, poster_plot=True) - # get_fast_quantity_visualizer(QuantityHypercubeWithoutTrendExtended).vsualize_year_trend_by_regions_and_altitudes( - # add_detailed_plot=True) - # get_fast_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendExtended).vsualize_year_trend_by_regions_and_altitudes() - - -def main_percentage_trend_poster_2(): - visualizer = get_fast_altitude_visualizer(AltitudeHypercubeVisualizerBisExtended, exact_year=1958) - # visualizer.vsualize_year_trend_by_regions_and_altitudes() - # visualizer.visualize_massif_trend_test_by_altitudes() - visualizer.visualize_massif_trend_test_one_altitude() - # visualizer.visualize_altitute_trend_test_by_regions() - - -def main_run(): - main_mean_log_likelihood_poster_1() - # main_percentage_trend_poster_2() - - -if __name__ == '__main__': - start = time.time() - main_run() - duration = time.time() - start - print('Full run took {}s'.format(round(duration, 1))) diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py deleted file mode 100644 index d29f20a20128453559ecd5f71e822dd59050c2cd..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py +++ /dev/null @@ -1,66 +0,0 @@ -import time - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer_extended import \ - AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrend -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \ - load_altitude_visualizer, load_quantity_visualizer -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - ALL_ALTITUDES, SCM_STUDIES -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter import GevLocationTrendTest - - -def get_fast_parameters(altitude=None): - save_to_file = False - only_first_one = False - nb_data_reduced_for_speed = 4 - if altitude is not None: - altitudes = [altitude] - else: - altitudes = [ALL_ALTITUDES[3], ALL_ALTITUDES[-7]] - last_starting_year = None - trend_test_class = GevLocationTrendTest - return altitudes, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class - - -def get_fast_altitude_visualizer(altitude_hypercube_class): - altitudes, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class = get_fast_parameters() - study_classes = SCM_STUDIES[:1] - visualizer = 
load_altitude_visualizer(altitude_hypercube_class, altitudes, last_starting_year, - nb_data_reduced_for_speed, only_first_one, save_to_file, study_classes, - trend_test_class) - return visualizer - - -def get_fast_quantity_visualizer(quantity_hypercube_class): - altitudes, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class = get_fast_parameters() - study_classes = SCM_STUDIES[:2] - visualizer = load_quantity_visualizer(quantity_hypercube_class, altitudes, last_starting_year, nb_data_reduced_for_speed, only_first_one, - save_to_file, study_classes, trend_test_class) - return visualizer - - -def main_mean_log_likelihood(): - # Simply the main graph - get_fast_quantity_visualizer(QuantityHypercubeWithoutTrend).visualize_year_trend_test(add_detailed_plots=True) - # get_fast_quantity_visualizer(QuantityHypercubeWithoutTrendExtended).vsualize_year_trend_by_regions_and_altitudes( - # add_detailed_plot=True) - # get_fast_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendExtended).vsualize_year_trend_by_regions_and_altitudes() - - -def main_percentage_trend(): - visualizer = get_fast_altitude_visualizer(AltitudeHypercubeVisualizerBisExtended) - visualizer.vsualize_year_trend_by_regions_and_altitudes() - visualizer.visualize_massif_trend_test_by_altitudes() - visualizer.visualize_altitute_trend_test_by_regions() - - -def main_run(): - main_mean_log_likelihood() - # main_percentage_trend() - - -if __name__ == '__main__': - start = time.time() - main_run() - duration = time.time() - start - print('Full run took {}s'.format(round(duration, 1))) diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py deleted file mode 100644 index 679e5d751a241d6be7683653683b8cb5c090cc99..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py +++ /dev/null @@ -1,75 +0,0 @@ -import time - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer_extended import \ - AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrend -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \ - load_altitude_visualizer, load_quantity_visualizer -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - ALL_ALTITUDES, SCM_STUDIES -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter import GevLocationTrendTest - - -def get_full_parameters(altitude=None, offset_starting_year=10): - save_to_file = True - only_first_one = False - nb_data_reduced_for_speed = False - if altitude is not None: - altitudes = [altitude] - else: - altitudes = ALL_ALTITUDES[3:-6] - first_starting_year = 1959 - last_starting_year = 2019 - offset_starting_year - trend_test_class = GevLocationTrendTest - return altitudes, first_starting_year, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class - - -def get_full_altitude_visualizer(altitude_hypercube_class, study_classes, exact_starting_year=None, altitude=None): - altitudes, first_starting_year, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class = 
get_full_parameters(altitude=altitude) - if exact_starting_year is not None: - last_starting_year = None - visualizer = load_altitude_visualizer(altitude_hypercube_class, altitudes, last_starting_year, - nb_data_reduced_for_speed, only_first_one, save_to_file, study_classes, - trend_test_class, exact_starting_year=exact_starting_year, first_starting_year=first_starting_year) - return visualizer - - -def get_full_quantity_visualizer(quantity_hypercube_class, altitude=None, study_classes=None): - altitudes, first_starting_year, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class = get_full_parameters(altitude=altitude) - if study_classes is None: - study_classes = SCM_STUDIES[:3] - visualizer = load_quantity_visualizer(quantity_hypercube_class, altitudes, last_starting_year, - nb_data_reduced_for_speed, only_first_one, - save_to_file, study_classes, trend_test_class) - return visualizer - - -def main_mean_log_likelihood(): - # Main plot - get_full_quantity_visualizer(QuantityHypercubeWithoutTrend).visualize_year_trend_test(add_detailed_plots=True) - # Detailed plot - # get_full_quantity_visualizer(QuantityHypercubeWithoutTrendExtended).vsualize_year_trend_by_regions_and_altitudes( - # add_detailed_plot=True) - - - # get_full_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendExtended).vsualize_year_trend_by_regions_and_altitudes() - - -def main_percentage_trend(): - for study_class in SCM_STUDIES: - study_classees = [study_class] - visualizer = get_full_altitude_visualizer(AltitudeHypercubeVisualizerBisExtended, exact_starting_year=1981, - study_classes=study_classees) - visualizer.visualize_massif_trend_test_by_altitudes() - visualizer.visualize_altitute_trend_test_by_regions() - - -def main_run(): - main_mean_log_likelihood() - # main_percentage_trend() - - -if __name__ == '__main__': - start = time.time() - main_run() - duration = time.time() - start - print('Full run took {}s'.format(round(duration, 1))) diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py deleted file mode 100644 index 011477f7efff572b3f60859ccaf1f8158e33d272..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py +++ /dev/null @@ -1,65 +0,0 @@ -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer_extended import \ - QuantityHypercubeWithoutTrend, AltitudeHypercubeVisualizerBisExtended, \ - AltitudeHypercubeVisualizerWithoutTrendExtended -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.main_files.main_fast_hypercube_one_altitudes import \ - get_fast_parameters, get_fast_quantity_visualizer, get_fast_altitude_visualizer -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.main_files.main_full_hypercube import \ - get_full_quantity_visualizer, get_full_altitude_visualizer -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \ - load_altitude_visualizer -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - SCM_STUDIES, altitude_massif_name_and_study_class_for_poster, SCM_STUDIES_NAMES, SCM_STUDY_NAME_TO_SCM_STUDY 
-from root_utils import get_display_name_from_object_type - -POSTER_ALTITUDES = [900, 1800, 2700] - - -def fast_poster(): - for altitude in POSTER_ALTITUDES[:1]: - study_classes = SCM_STUDIES[:2] - # The QuantityHypercubeWithoutTrend object is used to have one single plot with all the results - results = get_fast_quantity_visualizer(QuantityHypercubeWithoutTrend, - altitude=altitude, - study_classes=study_classes).visualize_year_trend_test( - add_detailed_plots=True) - study_class_to_year = dict(zip(study_classes, [t[1] for t in results])) - for study_class, exact_year in study_class_to_year.items(): - altitudes, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, trend_test_class = get_fast_parameters( - altitude=altitude) - spatial_visualizer = load_altitude_visualizer(AltitudeHypercubeVisualizerBisExtended, altitudes, - last_starting_year, - nb_data_reduced_for_speed, only_first_one, save_to_file, - [study_class], - trend_test_class, - exact_starting_year=exact_year) - spatial_visualizer.visualize_massif_trend_test_one_altitude() - - -def full_poster(): - for altitude in POSTER_ALTITUDES[:]: - study_classes = SCM_STUDIES[:] - visualizer = get_full_quantity_visualizer(QuantityHypercubeWithoutTrend, altitude=altitude, - study_classes=study_classes) - results = visualizer.visualize_year_trend_test( - add_detailed_plots=True) - study_name_to_year = dict(zip(visualizer.quantities, [t[1] for t in results])) - for study_name, exact_year in study_name_to_year.items(): - print(study_name, exact_year) - study_class = SCM_STUDY_NAME_TO_SCM_STUDY[study_name] - spatial_visualizer = get_full_altitude_visualizer(AltitudeHypercubeVisualizerBisExtended, [study_class], - exact_starting_year=exact_year, altitude=altitude) - spatial_visualizer.visualize_massif_trend_test_one_altitude() - - -def example_for_the_starting_years(): - for altitude, massif_name, study_class in altitude_massif_name_and_study_class_for_poster[:]: - visualizer = get_full_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendExtended, altitude=altitude, - study_classes=[study_class]) - isin_parameters = [(False, [massif_name], visualizer.massif_index_level)] - visualizer.visualize_year_trend_test(isin_parameters=isin_parameters, - subtitle_specified=get_display_name_from_object_type(study_class)) - - -if __name__ == '__main__': - # full_poster() - example_for_the_starting_years() diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/quantity_altitude_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/quantity_altitude_visualizer.py deleted file mode 100644 index d668b5c551e55cbad7ebc102949234e1ffbee3e0..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/quantity_altitude_visualizer.py +++ /dev/null @@ -1,52 +0,0 @@ -from collections import OrderedDict - -import pandas as pd - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \ - AltitudeHypercubeVisualizer - - -class QuantityAltitudeHypercubeVisualizer(AltitudeHypercubeVisualizer): - - @property - def study_title(self): - return 'Quantity Altitude Study' - - def subtitle_to_reduction_function(self, reduction_function, level=None, add_detailed_plot=False, subtitle=None): - def get_function_from_tuple(tuple_for_axis_0): - def f(df: pd.DataFrame): - # Loc with a tuple with respect the axis 0 - df = 
df.loc[tuple_for_axis_0, :].copy() - # Apply the reduction function - s = reduction_function(df) if level is None else reduction_function(df, level - 1) - return s - return f - - # Add the detailed plot, taken by loc with respect to the first index - subtitle_to_reduction_function = OrderedDict() - if add_detailed_plot: - tuples_axis_0 = self.tuple_values(idx=0) - for tuple_axis_0 in tuples_axis_0: - subtitle_to_reduction_function[tuple_axis_0] = get_function_from_tuple(tuple_axis_0) - # Add the super plot at the last rank - subtitle_to_reduction_function.update(super().subtitle_to_reduction_function(reduction_function, - level, add_detailed_plot, - 'global')) - - return subtitle_to_reduction_function - - @property - def quantities(self): - return self.tuple_values(idx=0) - - @property - def altitudes(self): - return self.tuple_values(idx=1) - - @property - def altitude_index_level(self): - return 1 - - @property - def massif_index_level(self): - return 2 diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py deleted file mode 100644 index 5b7fbcb94e2986f316f81c40743ee36570f67099..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py +++ /dev/null @@ -1,54 +0,0 @@ -from collections import OrderedDict -from itertools import product - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \ - AltitudeHypercubeVisualizer -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.quantity_altitude_visualizer import \ - QuantityAltitudeHypercubeVisualizer -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - study_iterator_global -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ - StudyVisualizer -from root_utils import get_display_name_from_object_type - - -def load_quantity_visualizer(quantity_hypercube_class, altitudes, last_starting_year, nb_data_reduced_for_speed, - only_first_one, - save_to_file, study_classes, trend_test_class): - visualizers = [StudyVisualizer(study, temporal_non_stationarity=True, verbose=False, multiprocessing=True) - for study in study_iterator_global(study_classes=study_classes, only_first_one=only_first_one, - altitudes=altitudes)] - study_classes_str = [get_display_name_from_object_type(c) for c in study_classes] - quantity_altitude_tuples = list(product(study_classes_str, altitudes)) - quantity_altitude_to_visualizer = OrderedDict(zip(quantity_altitude_tuples, visualizers)) - visualizer = quantity_hypercube_class(quantity_altitude_to_visualizer, - save_to_file=save_to_file, - trend_test_class=trend_test_class, - nb_data_reduced_for_speed=nb_data_reduced_for_speed, - last_starting_year=last_starting_year) - assert isinstance(visualizer, QuantityAltitudeHypercubeVisualizer) - return visualizer - - -def load_altitude_visualizer(altitude_hypercube_class, altitudes, last_starting_year, nb_data_reduced_for_speed, - only_first_one, save_to_file, study_classes, trend_test_class - , exact_starting_year=None, first_starting_year=1958, - orientations=None, - verbose=True): - visualizers = [StudyVisualizer(study, temporal_non_stationarity=True, verbose=False, multiprocessing=True) - for study in 
study_iterator_global(study_classes=study_classes, only_first_one=only_first_one, - altitudes=altitudes, - orientations=orientations, - verbose=verbose)] - altitude_to_visualizer = OrderedDict(zip(altitudes, visualizers)) - visualizer = altitude_hypercube_class(altitude_to_visualizer, - save_to_file=save_to_file, - trend_test_class=trend_test_class, - nb_data_reduced_for_speed=nb_data_reduced_for_speed, - last_starting_year=last_starting_year, - first_starting_year=first_starting_year, - exact_starting_year=exact_starting_year, - verbose=verbose, - ) - assert isinstance(visualizer, AltitudeHypercubeVisualizer) - return visualizer diff --git a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/main_study_visualizer.py similarity index 99% rename from experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py rename to experiment/meteo_france_data/scm_models_data/visualization/main_study_visualizer.py index 2629457be4eebffa0d47b39b49d9f7ff03ccc613..b8d9cc8ba70a90df8d363c7ef7a1112a99c7178b 100644 --- a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py +++ b/experiment/meteo_france_data/scm_models_data/visualization/main_study_visualizer.py @@ -2,7 +2,7 @@ import time from typing import List from experiment.meteo_france_data.scm_models_data.crocus.crocus_variables import CrocusDensityVariable -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer from papers.exceeding_snow_loads.discussion_data_comparison_with_eurocode.crocus_study_comparison_with_eurocode import \ CrocusDifferenceSnowLoad, \ diff --git a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/__init__.py b/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/main_studies_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/main_studies_visualizer.py deleted file mode 100644 index 4b6eb7e46ea88f12f26901c47435a0f277844d1d..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/main_studies_visualizer.py +++ /dev/null @@ -1,68 +0,0 @@ -import time -from experiment.trend_analysis.abstract_score import MannKendall, WeigthedScore, MeanScore, MedianScore -from experiment.trend_analysis.univariate_test.extreme_trend_test.abstract_gev_trend_test import GevLocationChangePointTest, \ - GevScaleChangePointTest, GevShapeChangePointTest -from experiment.trend_analysis.univariate_test.abstract_univariate_test import MannKendallTrendTest -from experiment.meteo_france_data.scm_models_data.safran.safran import ExtendedSafranPrecipitation -from experiment.meteo_france_data.scm_models_data.visualization import Studies -from experiment.meteo_france_data.scm_models_data.visualization import StudiesVisualizer, \ - AltitudeVisualizer -from experiment.meteo_france_data.scm_models_data.visualization import ALL_ALTITUDES, \ - study_iterator_global, SCM_STUDIES - -from 
experiment.meteo_france_data.scm_models_data.visualization import StudyVisualizer -from collections import OrderedDict - - -def normal_visualization(): - for study_type in [ExtendedSafranPrecipitation]: - extended_studies = Studies(study_type) - studies_visualizer = StudiesVisualizer(extended_studies) - studies_visualizer.mean_as_a_function_of_altitude(region_only=True) - - -def altitude_trends(): - save_to_file = True - only_first_one = False - # altitudes that have 20 massifs at least - altitudes = ALL_ALTITUDES[3:-6] - # altitudes = ALL_ALTITUDES[:2] - for study_class in SCM_STUDIES[:]: - for score_class in [MedianScore, MeanScore, MannKendall, WeigthedScore]: - visualizers = [StudyVisualizer(study, temporal_non_stationarity=True, verbose=True, - score_class=score_class) - for study in - study_iterator_global(study_classes=[study_class], only_first_one=only_first_one, - altitudes=altitudes)] - altitude_to_visualizer = OrderedDict(zip(altitudes, visualizers)) - visualizer = AltitudeVisualizer(altitude_to_visualizer, multiprocessing=False, save_to_file=save_to_file) - visualizer.negative_trend_percentages_evolution(reverse=True) - - -def altitude_trends_significant(): - save_to_file = False - only_first_one = False - # altitudes that have 20 massifs at least - altitudes = ALL_ALTITUDES[3:-6] - # altitudes = ALL_ALTITUDES[3:5] - altitudes = ALL_ALTITUDES[2:4] - for study_class in SCM_STUDIES[:1]: - trend_test_classes = [MannKendallTrendTest, GevLocationChangePointTest, GevScaleChangePointTest, GevShapeChangePointTest][:1] - visualizers = [StudyVisualizer(study, temporal_non_stationarity=True, verbose=False) - for study in study_iterator_global(study_classes=[study_class], only_first_one=only_first_one, - altitudes=altitudes)] - altitude_to_visualizer = OrderedDict(zip(altitudes, visualizers)) - visualizer = AltitudeVisualizer(altitude_to_visualizer, multiprocessing=False, save_to_file=save_to_file) - visualizer.trend_tests_percentage_evolution_with_altitude(trend_test_classes, starting_year_to_weights=None) - - -def main_run(): - # altitude_trends() - altitude_trends_significant() - - -if __name__ == '__main__': - start = time.time() - main_run() - duration = time.time() - start - print('Full run took {}s'.format(round(duration, 1))) diff --git a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies.py b/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies.py deleted file mode 100644 index d956246d1ba7b431ac082493fd77afdcdbaa5f01..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies.py +++ /dev/null @@ -1,29 +0,0 @@ -from collections import OrderedDict -from typing import Dict - -from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy -from experiment.meteo_france_data.scm_models_data.scm_constants import ALTITUDES - - -class Studies(object): - """Object that will handle studies of the same study type (it could be Safran for instance) - at several altitudes""" - - def __init__(self, study_type, altitude_list=None) -> None: - # Load altitude_list attribute - if altitude_list is None: - altitude_list = ALTITUDES - else: - assert isinstance(altitude_list, list) - assert len(altitude_list) > 0 - assert all([altitudes in ALTITUDES for altitudes in altitude_list]) - altitude_list = sorted(altitude_list) - self.altitude_list = altitude_list - # Load altitude_to_study attribute - self.altitude_to_study = 
OrderedDict() # type: Dict[int, AbstractStudy] - for altitude in self.altitude_list: - self.altitude_to_study[altitude] = study_type(altitude=altitude) - - @property - def first_study(self): - return self.altitude_to_study[self.altitude_list[0]] diff --git a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py deleted file mode 100644 index f553d1ac624b5194fe5b6424b1acf76e3cb47510..0000000000000000000000000000000000000000 --- a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py +++ /dev/null @@ -1,250 +0,0 @@ -from collections import OrderedDict, Counter -import os -import os.path as op -from multiprocessing.dummy import Pool -from typing import Dict - -import numpy as np -import pandas as pd -import matplotlib.pyplot as plt -from matplotlib.lines import Line2D - -from experiment.meteo_france_data.scm_models_data.abstract_extended_study import AbstractExtendedStudy -from experiment.meteo_france_data.scm_models_data.visualization.studies_visualization.studies import Studies -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ - StudyVisualizer -from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest -from experiment.meteo_france_data.scm_models_data.visualization.utils import plot_df -from root_utils import cached_property, get_display_name_from_object_type, VERSION_TIME - - -class StudiesVisualizer(object): - - def __init__(self, studies: Studies) -> None: - self.studies = studies - - @property - def first_study(self): - return self.studies.first_study - - def mean_as_a_function_of_altitude(self, region_only=False): - # Load the massif names to display - if region_only: - assert isinstance(self.first_study, AbstractExtendedStudy) - massif_names = self.first_study.region_names - else: - massif_names = self.first_study.study_massif_names - # Load the dictionary that maps each massif_name to its corresponding time series - mean_series = [] - for study in self.studies.altitude_to_study.values(): - mean_serie = study.df_annual_total.loc[:, massif_names].mean(axis=0) - mean_series.append(mean_serie) - df_mean = pd.concat(mean_series, axis=1) # type: pd.DataFrame - df_mean.columns = self.studies.altitude_list - plot_df(df_mean) - - -def get_percentages(v): - return v.percentages_of_negative_trends()[0] - - -class AltitudeVisualizer(object): - - def __init__(self, altitude_to_study_visualizer: Dict[int, StudyVisualizer], multiprocessing=False, - save_to_file=False): - self.save_to_file = save_to_file - self.multiprocessing = multiprocessing - assert isinstance(altitude_to_study_visualizer, OrderedDict) - self.altitude_to_study_visualizer = altitude_to_study_visualizer # type: Dict[int, StudyVisualizer] - - @property - def altitudes(self): - return list(self.altitude_to_study_visualizer.keys()) - - @cached_property - def all_percentages(self): - if self.multiprocessing: - with Pool(4) as p: - l = p.map(get_percentages, list(self.altitude_to_study_visualizer.values())) - else: - l = [get_percentages(v) for v in self.altitude_to_study_visualizer.values()] - return l - - @property - def any_study_visualizer(self) -> StudyVisualizer: - return list(self.altitude_to_study_visualizer.values())[0] - - @property - def study(self): - return self.any_study_visualizer.study - - def get_item_fct(self, 
year): - idx = self.any_study_visualizer.starting_years.index(year) - f = lambda s: s[idx] - return f - - @cached_property - def starting_year(self): - return self.any_study_visualizer.starting_years[0] - - def get_top_potential_years(self, reverse=False): - top_n = 5 - top_top = 3 - # keep the top_n for each altitude - all_years = [[year for year, _ in sorted(enumerate(p), key=lambda s: s[1], reverse=reverse)[-top_n:]] for p in - self.all_percentages] - from itertools import chain - all_years = list(chain(*all_years)) - years = [y for y, _ in sorted(Counter(all_years).items(), key=lambda s: s[1])[-top_top:]] - years = [y + self.starting_year for y in years] - return years - - def show_or_save_to_file(self, specific_title=''): - if self.save_to_file: - main_title, _ = '_'.join(self.study.title.split()).split('/') - filename = "{}/{}/".format(VERSION_TIME, main_title) - filename += specific_title - filepath = op.join(self.study.result_full_path, filename + '.png') - dirname = op.dirname(filepath) - if not op.exists(dirname): - os.makedirs(dirname, exist_ok=True) - plt.savefig(filepath) - else: - plt.show() - - def negative_trend_percentages_evolution(self, reverse=True): - curve_name__metric_and_color = [ - ('max', np.max, 'g'), - ('mean', np.mean, 'b'), - ('median', np.median, 'c'), - ('min', np.min, 'r'), - ] - # Add some years - # spotted_years = [1963, 1976] - # years_to_display = spotted_years - str_markers = ['o'] + [m for m in Line2D.markers if isinstance(m, str)][3:] - # for year, marker in zip(years_to_display, str_markers): - # new = (str(year), self.get_item_fct(year), 'y', marker + ':') - # curve_name__metric_and_color.append(new) - - for year, marker in zip(self.get_top_potential_years(), str_markers): - new = (str(year), self.get_item_fct(year), 'y', marker + ':') - curve_name__metric_and_color.append(new) - for year, marker in zip(self.get_top_potential_years(reverse=True), str_markers): - new = (str(year), self.get_item_fct(year), 'm', marker + ':') - curve_name__metric_and_color.append(new) - - fig, ax = plt.subplots(1, 1, figsize=self.any_study_visualizer.figsize) - for curve_name, metric, color, *marker in curve_name__metric_and_color[:]: - marker, curve_name = (marker[0], curve_name + ' starting year') if marker \ - else ('-', curve_name + ' over the starting years') - values = [metric(p) for p in self.all_percentages] - if reverse: - values = [100 - v for v in values] - k = ['max', 'min'] - for before, new in zip(k, k[::-1]): - if before in curve_name: - curve_name = curve_name.replace(before, new) - break - ax.plot(self.altitudes, values, color + marker, label=curve_name) - ax.legend() - ax.set_xticks(self.altitudes) - ax.set_yticks(list(range(0, 101, 10))) - ax.grid() - - ax.axhline(y=50, color='k') - word = 'positive' if reverse else 'negative' - ax.set_ylabel('% of massifs with {} trends'.format(word)) - ax.set_xlabel('altitude') - variable_name = self.study.variable_class.NAME - score_name = get_display_name_from_object_type(self.any_study_visualizer.score_class) - title = 'Evolution of {} trends wrt to the altitude with {}'.format(variable_name, score_name) - ax.set_title(title) - self.show_or_save_to_file(specific_title=title) - - """ - Trends tests - - In all the cases, I use all the massifs and I use all the altitude - - The only thing that can change, is that I use a different starting year distribution - (specified with starting_year_to_weights) dict - """ - - # Trend tests repartition - - def trend_tests_percentage_repartition_spatially(self, 
trend_test_classes, starting_year_to_weights: None): - pass - - - # Trend tests evolution - - def trend_tests_percentage_evolution_with_altitude(self, trend_test_classes, starting_year_to_weights: None): - # Load uniform weights by default - if starting_year_to_weights is None: - startings_years = self.any_study_visualizer.starting_years - uniform_weight = 1 / len(startings_years) - starting_year_to_weights = {year: uniform_weight for year in startings_years} - - # To get a single year, I could do: - # starting_year_to_weights = {1980: 1.0} - else: - uniform_weight = 0.0 - - fig, ax = plt.subplots(1, 1, figsize=self.any_study_visualizer.figsize) - - # Create one display for each trend test class - markers = ['o', 's', 'D', '*'] - assert len(markers) >= len(trend_test_classes) - # Add a second legend for the color and to explain the line - for marker, trend_test_class in zip(markers, trend_test_classes): - self.trend_test_class_weighted_percentages(ax, marker, trend_test_class, starting_year_to_weights) - - # Add the color legend - handles, labels = ax.get_legend_handles_labels() - unique_labels = set(labels) - idx_labels = sorted([labels.index(label) for label in unique_labels]) - handles_ax, labels_ax = [handles[i] for i in idx_labels], [labels[i] for i in idx_labels] - ax.legend(handles_ax, labels_ax, markerscale=0.0, loc=1) - ax.set_xticks(self.altitudes) - ax.set_yticks(list(range(0, 101, 10))) - ax.grid() - - # Add the marker legend - names = [get_display_name_from_object_type(c) for c in trend_test_classes] - idx_for_positive_trend = [i for i, label in enumerate(labels) if label == AbstractUnivariateTest.POSITIVE_TREND] - handles_ax2, labels_ax2 = [handles[i] for i in idx_for_positive_trend], names - ax2 = ax.twinx() - ax2.legend(handles_ax2, labels_ax2, loc=2) - ax2.set_yticks([]) - - # Global information - added_str = '' if uniform_weight > 0.0 else 'weighted ' - ylabel = '% averaged on massifs & {}averaged on starting years'.format(added_str) - ylabel += ' (with uniform weights)' - ax.set_ylabel(ylabel) - ax.set_xlabel('altitude') - variable_name = self.study.variable_class.NAME - title = 'Evolution of {} trends (significative or not) wrt to the altitude with {}'.format(variable_name, - ', '.join(names)) - ax.set_title(title) - self.show_or_save_to_file(specific_title=title) - - def trend_test_class_weighted_percentages(self, ax, marker, trend_test_class, starting_year_to_weights): - # Build OrderedDict mapping altitude to a mean serie - altitude_to_serie_with_mean_percentages = OrderedDict() - for altitude, study_visualizer in self.altitude_to_study_visualizer.items(): - s = study_visualizer.df_trend_test_count(trend_test_class, starting_year_to_weights).mean(axis=1) - altitude_to_serie_with_mean_percentages[altitude] = s - # Plot weighted percentages over the years - for trend_type, style in trend_test_class.display_trend_type_to_style().items(): - - weighted_percentages = [v.loc[trend_type] if trend_type in v.index else 0.0 - for v in altitude_to_serie_with_mean_percentages.values()] - if set(weighted_percentages) == {0.0}: - ax.plot([], [], style + marker, label=trend_type) - else: - ax.plot(self.altitudes, weighted_percentages, style + marker, label=trend_type - ) - # todo: If I wanted I could display here with a degradé, all the year composing the keys of starting_year_to_weights dictionary - # maybe I could do that only when the dictionary is small for instance diff --git a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/__init__.py 
b/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/study_visualizer.py similarity index 100% rename from experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py rename to experiment/meteo_france_data/scm_models_data/visualization/study_visualizer.py diff --git a/experiment/meteo_france_data/stations_data/comparison_analysis.py b/experiment/meteo_france_data/stations_data/comparison_analysis.py index 9b1a9d802742171f38058449f03533ee4a869078..0eb48e2a5203eb37800363d3af53e6e278cb90af 100644 --- a/experiment/meteo_france_data/stations_data/comparison_analysis.py +++ b/experiment/meteo_france_data/stations_data/comparison_analysis.py @@ -5,7 +5,7 @@ from typing import List from cached_property import cached_property from experiment.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ ALL_ALTITUDES from extreme_fit.estimator.full_estimator.abstract_full_estimator import \ FullEstimatorInASingleStepWithSmoothMargin diff --git a/experiment/meteo_france_data/stations_data/main_station_comparison.py b/experiment/meteo_france_data/stations_data/main_station_comparison.py index fa5cf0d0a2ded453510dee0674aeb95cd8b241d6..03f44f67073394fe68e5ca3cbac3f7d2c850c48a 100644 --- a/experiment/meteo_france_data/stations_data/main_station_comparison.py +++ b/experiment/meteo_france_data/stations_data/main_station_comparison.py @@ -1,4 +1,4 @@ -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ ALL_ALTITUDES_WITH_20_STATIONS_AT_LEAST from experiment.meteo_france_data.stations_data.visualization.comparisons_visualization.comparisons_visualization import \ ComparisonsVisualization, path_backup_csv_file diff --git a/experiment/meteo_france_data/stations_data/main_station_comparison_all_altitudes.py b/experiment/meteo_france_data/stations_data/main_station_comparison_all_altitudes.py index 1d5f2e995bdb1efb1948f8b800a5ddc022088f77..d9469c794b2c13593d8be85ac75606d6e321aa28 100644 --- a/experiment/meteo_france_data/stations_data/main_station_comparison_all_altitudes.py +++ b/experiment/meteo_france_data/stations_data/main_station_comparison_all_altitudes.py @@ -1,4 +1,4 @@ -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ ALL_ALTITUDES from experiment.meteo_france_data.stations_data.visualization.comparisons_visualization.comparisons_visualization import \ ComparisonsVisualization diff --git a/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py b/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py index 7be7297adac50bdefe23884753541c8b43e7faf5..ed4e689d7d37666f42894c0b6b000739c040a06f 100644 --- 
a/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py +++ b/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py @@ -10,7 +10,7 @@ import numpy as np import pandas as pd from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ VisualizationParameters from experiment.meteo_france_data.stations_data.comparison_analysis import ComparisonAnalysis, MASSIF_COLUMN_NAME, \ REANALYSE_STR, ALTITUDE_COLUMN_NAME, STATION_COLUMN_NAME diff --git a/experiment/regression_margin/__init__.py b/experiment/regression_margin/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/experiment/regression_margin/regression_margin.py b/experiment/regression_margin/regression_margin.py deleted file mode 100644 index 33b67924c6eba9e4bdd09609154fa872dea70ee6..0000000000000000000000000000000000000000 --- a/experiment/regression_margin/regression_margin.py +++ /dev/null @@ -1,66 +0,0 @@ -import numpy as np - -from extreme_fit.estimator.full_estimator.abstract_full_estimator import FullEstimatorInASingleStepWithSmoothMargin -from extreme_fit.function.margin_function.linear_margin_function import LinearMarginFunction -from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import LinearAllParametersAllDimsMarginModel, \ - ConstantMarginModel -from extreme_fit.model.max_stable_model.max_stable_models import Smith -from extreme_fit.distribution.gev.gev_params import GevParams -from spatio_temporal_dataset.coordinates.spatial_coordinates.coordinates_1D import LinSpaceSpatialCoordinates -import matplotlib.pyplot as plt - -from spatio_temporal_dataset.dataset.simulation_dataset import FullSimulatedDataset - -nb_points = 50 -nb_obs = 60 -nb_estimator = 2 -show = False - -coordinates = LinSpaceSpatialCoordinates.from_nb_points(nb_points=nb_points) - -########## GENERATING THE DATA ##################### - -# MarginModel Linear with respect to the shape (from 0.01 to 0.02) -params_sample = { - # (GevParams.GEV_SHAPE, 0): 0.2, - (GevParams.LOC, 0): 10, - (GevParams.SHAPE, 0): 1.0, - (GevParams.SCALE, 0): 1.0, -} -margin_model = ConstantMarginModel(coordinates=coordinates, params_sample=params_sample) -margin_model_for_estimator_class = [LinearAllParametersAllDimsMarginModel, ConstantMarginModel][-1] -max_stable_model = Smith() - - -######### FITTING A MODEL ################# - - -axes = None -for i in range(nb_estimator): - print("{}/{}".format(i+1, nb_estimator)) - # Data part - dataset = FullSimulatedDataset.from_double_sampling(nb_obs=nb_obs, margin_model=margin_model, - coordinates=coordinates, - max_stable_model=max_stable_model) - - if show and i == 0: - # Plot a realization from the maxima distribution (i.e the maxima obtained just by simulating the marginal law) - for maxima in np.transpose(dataset.maxima_frech()): - plt.plot(coordinates.coordinates_values(), maxima, 'o') - plt.show() - - margin_function_sample = dataset.margin_model.margin_function_sample # type: LinearMarginFunction - margin_function_sample.visualize_function(show=False, axes=axes, dot_display=True) - axes = margin_function_sample.visualization_axes - - # Estimation part - margin_model_for_estimator = 
margin_model_for_estimator_class(coordinates) - full_estimator = FullEstimatorInASingleStepWithSmoothMargin(dataset, margin_model_for_estimator, max_stable_model) - full_estimator.fit() - full_estimator.function_from_fit.visualize_function(axes=axes, show=False) -plt.show() - -# Display all the margin on the same graph for comparison - -# Plot the margin functions -# margin_model.margin_function_sample.visualize_2D() diff --git a/extreme_fit/function/margin_function/abstract_margin_function.py b/extreme_fit/function/margin_function/abstract_margin_function.py index 994e7ebc9ba57afb8c3905a75b9d23c09bc50d68..981a25aa47d62995661c8b877579d46cde07f765 100644 --- a/extreme_fit/function/margin_function/abstract_margin_function.py +++ b/extreme_fit/function/margin_function/abstract_margin_function.py @@ -6,7 +6,7 @@ import pandas as pd from experiment.meteo_france_data.scm_models_data.visualization.utils import create_adjusted_axes from extreme_fit.distribution.gev.gev_params import GevParams -from experiment.meteo_france_data.plot.create_shifted_cmap import imshow_shifted +from experiment.meteo_france_data.scm_models_data.visualization.create_shifted_cmap import imshow_shifted from extreme_fit.function.abstract_function import AbstractFunction from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates from spatio_temporal_dataset.slicer.split import Split diff --git a/papers/contrasting_snow_loads/main_spatial_relative_change_in_maxima_at_fixed_altitude.py b/papers/contrasting_snow_loads/main_spatial_relative_change_in_maxima_at_fixed_altitude.py index 17d48f3fc1c0a1f5fb2f5066f3e5c570d9b94812..881ac233d0fa1f63ad391bff7c324f21d4de5c7c 100644 --- a/papers/contrasting_snow_loads/main_spatial_relative_change_in_maxima_at_fixed_altitude.py +++ b/papers/contrasting_snow_loads/main_spatial_relative_change_in_maxima_at_fixed_altitude.py @@ -1,20 +1,10 @@ -from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusDepth, CrocusSnowLoad3Days, \ +from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoad3Days, \ CrocusSnowLoadTotal -from experiment.meteo_france_data.scm_models_data.crocus.crocus_variables import CrocusDepthVariable from experiment.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall, SafranRainfall, SafranPrecipitation -from experiment.meteo_france_data.scm_models_data.safran.safran_variable import SafranTotalPrecipVariable -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - study_iterator_global, SCM_STUDY_CLASS_TO_ABBREVIATION, snow_density_str, ALL_ALTITUDES_WITHOUT_NAN -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer import matplotlib.pyplot as plt -from papers.exceeding_snow_loads.discussion_data_comparison_with_eurocode.crocus_study_comparison_with_eurocode import \ - CrocusDifferenceSnowLoad, \ - CrocusSnowDensityAtMaxofSwe, CrocusDifferenceSnowLoadRescaledAndEurocodeToSeeSynchronization, \ - CrocusSnowDepthAtMaxofSwe, CrocusSnowDepthDifference -from papers.exceeding_snow_loads.paper_utils import dpi_paper1_figure - def test(): study = CrocusSnowLoad3Days(altitude=1200) diff --git a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/mixed_distribution/mixed_distribution_impact.py 
b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/mixed_distribution/mixed_distribution_impact.py index 0bea22245ebef006d9988cebe558b706c0d23a3b..e713a8277d1429b7d18f1df048b06df23e7c1934 100644 --- a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/mixed_distribution/mixed_distribution_impact.py +++ b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/mixed_distribution/mixed_distribution_impact.py @@ -1,10 +1,6 @@ import pandas as pd from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - ALL_ALTITUDES_WITHOUT_NAN -from extreme_fit.model.result_from_model_fit.result_from_extremes.confidence_interval_method import \ - ConfidenceIntervalMethodFromExtremes from papers.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import StudyVisualizerForNonStationaryTrends diff --git a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/main_qqplot_for_big_shapes.py b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/main_qqplot_for_big_shapes.py index 3109e7ea594c265c373534f755701b17aeb230d0..5b66afc5453ba871efcb4e50118dc86c30f7363e 100644 --- a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/main_qqplot_for_big_shapes.py +++ b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/main_qqplot_for_big_shapes.py @@ -1,8 +1,7 @@ -from typing import Dict import matplotlib.pyplot as plt from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ ALL_ALTITUDES_WITHOUT_NAN from papers.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import \ StudyVisualizerForNonStationaryTrends diff --git a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/plot_qqplot.py b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/plot_qqplot.py index 1aaa9a01722cc5dc392c104ee70e3155202fb5b9..f3de8750233168d81c024d394b95a8f1ebda0e7f 100644 --- a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/plot_qqplot.py +++ b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/qqplot/plot_qqplot.py @@ -7,8 +7,6 @@ import pandas as pd from matplotlib.ticker import PercentFormatter from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - ALL_ALTITUDES_WITHOUT_NAN from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \ TemporalMarginFitMethod from extreme_fit.model.result_from_model_fit.result_from_extremes.abstract_extract_eurocode_return_level import \ diff --git a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/shape/study_visualizer_for_shape_repartition.py b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/shape/study_visualizer_for_shape_repartition.py index 59d377f79f48fad285074cda3de9b5731e66f176..241c8508bdc0cfc61091134fc3c050cd7a932cca 100644 --- a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/shape/study_visualizer_for_shape_repartition.py +++ 
b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/shape/study_visualizer_for_shape_repartition.py @@ -1,11 +1,10 @@ import matplotlib from cached_property import cached_property -from experiment.meteo_france_data.plot.create_shifted_cmap import get_shifted_map +from experiment.meteo_france_data.scm_models_data.visualization.create_shifted_cmap import get_shifted_map from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy from papers.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import \ StudyVisualizerForNonStationaryTrends -from experiment.trend_analysis.abstract_score import MeanScore class StudyVisualizerForShape(StudyVisualizerForNonStationaryTrends): diff --git a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/main_fit_without_maximum.py b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/main_fit_without_maximum.py index 22faaf45b72933fea0c3460a10f72d557ef17980..834198ee6096972f0d81e2821f453e52f1ea1076 100644 --- a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/main_fit_without_maximum.py +++ b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/main_fit_without_maximum.py @@ -1,7 +1,7 @@ from typing import Dict from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ ALL_ALTITUDES_WITHOUT_NAN from papers.exceeding_snow_loads.check_mle_convergence_for_trends.without_maximum.study_visualizer_for_fit_witout_maximum import \ StudyVisualizerForFitWithoutMaximum diff --git a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/study_visualizer_for_fit_witout_maximum.py b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/study_visualizer_for_fit_witout_maximum.py index 2ce354fc7a4baa4118cedfc0b0b43c201f6c7ae7..e1f62b2d01d6c4085463be2d21b37426a77b8083 100644 --- a/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/study_visualizer_for_fit_witout_maximum.py +++ b/papers/exceeding_snow_loads/check_mle_convergence_for_trends/without_maximum/study_visualizer_for_fit_witout_maximum.py @@ -1,14 +1,11 @@ from typing import Dict, Tuple -import matplotlib import numpy as np from cached_property import cached_property -from experiment.meteo_france_data.plot.create_shifted_cmap import get_shifted_map from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy from papers.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import \ StudyVisualizerForNonStationaryTrends -from experiment.trend_analysis.abstract_score import MeanScore class StudyVisualizerForFitWithoutMaximum(StudyVisualizerForNonStationaryTrends): diff --git a/papers/exceeding_snow_loads/data/main_example_swe_total_plot.py b/papers/exceeding_snow_loads/data/main_example_swe_total_plot.py index c96cc96b86c2d4c935a321c8a80f644c36250020..efdd80b09e6df9f72b46326207080f5cde672d7b 100644 --- a/papers/exceeding_snow_loads/data/main_example_swe_total_plot.py +++ b/papers/exceeding_snow_loads/data/main_example_swe_total_plot.py @@ -1,9 +1,9 @@ import matplotlib.pyplot as plt from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal -from 
experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ study_iterator_global, SCM_STUDY_CLASS_TO_ABBREVIATION -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer from papers.exceeding_snow_loads.paper_utils import dpi_paper1_figure diff --git a/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_comparison_with_eurocode_examples.py b/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_comparison_with_eurocode_examples.py index a7026f8fd0ced96524fd4dac08de80c5b234c798..0017f555101e1ba37fb5f5fe14649161123eb85c 100644 --- a/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_comparison_with_eurocode_examples.py +++ b/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_comparison_with_eurocode_examples.py @@ -1,19 +1,15 @@ -import matplotlib as mpl # mpl.rcParams['text.usetex'] = True # mpl.rcParams['text.latex.preamble'] = [r'\usepackage{amsmath}'] -from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusDepth -from experiment.meteo_france_data.scm_models_data.crocus.crocus_variables import CrocusDepthVariable -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ study_iterator_global, SCM_STUDY_CLASS_TO_ABBREVIATION, snow_density_str -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer import matplotlib.pyplot as plt from papers.exceeding_snow_loads.discussion_data_comparison_with_eurocode.crocus_study_comparison_with_eurocode import \ CrocusDifferenceSnowLoad, \ - CrocusSnowDensityAtMaxofSwe, CrocusDifferenceSnowLoadRescaledAndEurocodeToSeeSynchronization, \ - CrocusSnowDepthAtMaxofSwe, CrocusSnowDepthDifference + CrocusSnowDensityAtMaxofSwe, CrocusSnowDepthDifference from papers.exceeding_snow_loads.paper_utils import dpi_paper1_figure diff --git a/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_comparison_with_eurocode_global.py b/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_comparison_with_eurocode_global.py index 6a8342040ca621b4d712610dc1187fa500f7799e..07069c5f7c52b257ed6b4a2d31b5a9dc53b57567 100644 --- a/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_comparison_with_eurocode_global.py +++ b/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_comparison_with_eurocode_global.py @@ -1,15 +1,12 @@ -from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusDepth -from experiment.meteo_france_data.scm_models_data.crocus.crocus_variables import CrocusDepthVariable -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - study_iterator_global, SCM_STUDY_CLASS_TO_ABBREVIATION, snow_density_str -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from 
experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ + study_iterator_global, SCM_STUDY_CLASS_TO_ABBREVIATION +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer import matplotlib.pyplot as plt from experiment.exceeding_snow_loads.discussion_data_comparison_with_eurocode.crocus_study_comparison_with_eurocode import \ CrocusDifferenceSnowLoad, \ - CrocusSnowDensityAtMaxofSwe, CrocusDifferenceSnowLoadRescaledAndEurocodeToSeeSynchronization, \ - CrocusSnowDepthAtMaxofSwe, CrocusSnowDepthDifference + CrocusSnowDensityAtMaxofSwe, CrocusSnowDepthDifference from experiment.exceeding_snow_loads.paper_utils import dpi_paper1_figure diff --git a/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_spatio_temporal_density_wrt_altitude.py b/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_spatio_temporal_density_wrt_altitude.py index 0c4dfadc594de7726a985a726f74cb17e5ae573b..96c796f251b2fef49c17c5d5651b973702e6a57e 100644 --- a/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_spatio_temporal_density_wrt_altitude.py +++ b/papers/exceeding_snow_loads/discussion_data_comparison_with_eurocode/main_spatio_temporal_density_wrt_altitude.py @@ -1,15 +1,11 @@ -from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusDepth -from experiment.meteo_france_data.scm_models_data.crocus.crocus_variables import CrocusDepthVariable -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - study_iterator_global, SCM_STUDY_CLASS_TO_ABBREVIATION, snow_density_str -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ + study_iterator_global, SCM_STUDY_CLASS_TO_ABBREVIATION +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer import matplotlib.pyplot as plt from experiment.exceeding_snow_loads.discussion_data_comparison_with_eurocode.crocus_study_comparison_with_eurocode import \ - CrocusDifferenceSnowLoad, \ - CrocusSnowDensityAtMaxofSwe, CrocusDifferenceSnowLoadRescaledAndEurocodeToSeeSynchronization, \ - CrocusSnowDepthAtMaxofSwe, CrocusSnowDepthDifference + CrocusSnowDensityAtMaxofSwe from experiment.exceeding_snow_loads.paper_utils import dpi_paper1_figure diff --git a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/__init__.py b/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main2_choice_to_not_use_starting_years.py b/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main2_choice_to_not_use_starting_years.py deleted file mode 100644 index d6fc5ab39673686501ecc3bad9e777c18d08a207..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main2_choice_to_not_use_starting_years.py +++ /dev/null @@ -1,36 +0,0 @@ -import time - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_year_hypercube_visualizer import \ - Altitude_Hypercube_Year_Visualizer -from experiment.exceeding_snow_loads.paper1_old import 
get_full_altitude_visualizer, FULL_ALTITUDES - - -def main_fast_spatial_repartition(): - for altitude in FULL_ALTITUDES[-1:]: - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958) - vizualiser.save_to_file = False - vizualiser.visualize_massif_trend_test_one_altitude() - - -def main_full_spatial_repartition(): - for altitude in FULL_ALTITUDES[:]: - # Compute for the most likely starting year - # vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude) - # vizualiser.visualize_massif_trend_test_one_altitude() - # Compute the trend for a linear trend - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958) - vizualiser.visualize_massif_trend_test_one_altitude() - - -def main_run(): - # main_full_spatial_repartition() - main_fast_spatial_repartition() - - -if __name__ == '__main__': - start = time.time() - main_run() - duration = time.time() - start - print('Full run took {}s'.format(round(duration, 1))) diff --git a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main3_non_stationary_strength_evolution.py b/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main3_non_stationary_strength_evolution.py deleted file mode 100644 index 368008f2611ab0341bdc4508d802efc5a553837c..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main3_non_stationary_strength_evolution.py +++ /dev/null @@ -1,42 +0,0 @@ -import time - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_year_hypercube_visualizer import \ - Altitude_Hypercube_Year_Visualizer -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter import GevScaleTrendTest, \ - GevLocationTrendTest -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two_parameters.gev_trend_test_two_parameters import GevLocationAndScaleTrendTest - -""" -Visualize the 0.99 quantile initial value and its evolution -""" -from experiment.exceeding_snow_loads.paper1_old import get_full_altitude_visualizer, FULL_ALTITUDES - - -def main_fast_spatial_risk_evolution(): - for altitude in [1800]: - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958, reduce_strength_array=True, - trend_test_class=GevLocationAndScaleTrendTest) - vizualiser.save_to_file = False - vizualiser.visualize_massif_trend_test_one_altitude() - - -def main_full_spatial_risk_evolution(): - for altitude in FULL_ALTITUDES[:]: - for trend_test_class in [GevLocationTrendTest, GevScaleTrendTest, GevLocationAndScaleTrendTest][-1:]: - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958, reduce_strength_array=True, - trend_test_class=trend_test_class) - vizualiser.visualize_massif_trend_test_one_altitude() - - -def main_run(): - # main_full_spatial_risk_evolution() - main_fast_spatial_risk_evolution() - - -if __name__ == '__main__': - start = time.time() - main_run() - duration = time.time() - start - print('Full run took {}s'.format(round(duration, 1))) diff --git a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main4_common_spatial_altitude_starting_years_impact.py b/papers/exceeding_snow_loads/paper1_old/1 - non stationary model 
choice/main4_common_spatial_altitude_starting_years_impact.py deleted file mode 100644 index 443a0c6a5eab74ae8c86620377911ef8a442c7af..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main4_common_spatial_altitude_starting_years_impact.py +++ /dev/null @@ -1,56 +0,0 @@ -import time - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_year_hypercube_visualizer import \ - Altitude_Hypercube_Year_Visualizer, AltitudeHypercubeVisualizerWithoutTrendType -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two_parameters.gev_trend_test_two_parameters import GevLocationAndScaleTrendTest - -""" -Visualize the 0.99 quantile initial value and its evolution -""" -from experiment.exceeding_snow_loads.paper1_old import get_full_altitude_visualizer, FULL_ALTITUDES - - -def main_fast_spatial_risk_evolution(): - vizualiser = get_full_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendType, altitude=None, - reduce_strength_array=True, - trend_test_class=GevLocationAndScaleTrendTest, - offset_starting_year=28) - vizualiser.save_to_file = False - vizualiser.sigma_for_best_year = 1.0 - res = vizualiser.visualize_year_trend_test(subtitle_specified='CrocusSwe3Days') - print(res) - - -def main_full_spatial_risk_evolution(): - for trend_test_class in [GevLocationAndScaleTrendTest]: - # Compare the risk with and without taking into account the starting year - vizualiser = get_full_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendType, altitude=None, - reduce_strength_array=True, - trend_test_class=trend_test_class, - offset_starting_year=20) - vizualiser.sigma_for_best_year = 1.0 - res = vizualiser.visualize_year_trend_test(subtitle_specified='CrocusSwe3Days') - best_year = res[0][1] - for altitude in FULL_ALTITUDES[:]: - # Starting Year=1958 - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958, reduce_strength_array=True, - trend_test_class=trend_test_class) - vizualiser.visualize_massif_trend_test_one_altitude() - # Optimal common starting year - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=best_year, reduce_strength_array=True, - trend_test_class=trend_test_class) - vizualiser.visualize_massif_trend_test_one_altitude() - - -def main_run(): - main_full_spatial_risk_evolution() - # main_fast_spatial_risk_evolution() - - -if __name__ == '__main__': - start = time.time() - main_run() - duration = time.time() - start - print('Full run took {}s'.format(round(duration, 1))) diff --git a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main4_common_spatial_starting_years_impact.py b/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main4_common_spatial_starting_years_impact.py deleted file mode 100644 index 3ad25abdd3c1433a2a7dbacc1e1dd5c56cd683e0..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main4_common_spatial_starting_years_impact.py +++ /dev/null @@ -1,58 +0,0 @@ -import time - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_year_hypercube_visualizer import \ - Altitude_Hypercube_Year_Visualizer, AltitudeHypercubeVisualizerWithoutTrendType -from 
experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two_parameters.gev_trend_test_two_parameters import GevLocationAndScaleTrendTest - -""" -Visualize the 0.99 quantile initial value and its evolution -""" -from experiment.exceeding_snow_loads.paper1_old import get_full_altitude_visualizer, FULL_ALTITUDES - - -def main_fast_spatial_risk_evolution(): - for altitude in [1800]: - vizualiser = get_full_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendType, altitude=altitude, - reduce_strength_array=True, - trend_test_class=GevLocationAndScaleTrendTest, - offset_starting_year=20) - vizualiser.save_to_file = False - res = vizualiser.visualize_year_trend_test(subtitle_specified='CrocusSwe3Days') - print(res) - vizualiser.visualize_massif_trend_test_one_altitude() - vizualiser.reduce_strength_array = True - vizualiser.visualize_massif_trend_test_one_altitude() - - -def main_full_spatial_risk_evolution(): - # Compare the risk with and without taking into account the starting year - for altitude in FULL_ALTITUDES[-1:]: - for trend_test_class in [GevLocationAndScaleTrendTest]: - # Starting Year=1958 - # vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - # exact_starting_year=1958, reduce_strength_array=True, - # trend_test_class=trend_test_class) - # vizualiser.visualize_massif_trend_test_one_altitude() - # Optimal common starting year - vizualiser = get_full_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendType, altitude=altitude, - reduce_strength_array=True, - trend_test_class=trend_test_class, - offset_starting_year=20) - res = vizualiser.visualize_year_trend_test(subtitle_specified='CrocusSwe3Days') - best_year = res[0][1] - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=best_year, reduce_strength_array=True, - trend_test_class=trend_test_class) - vizualiser.visualize_massif_trend_test_one_altitude() - - -def main_run(): - main_full_spatial_risk_evolution() - # main_fast_spatial_risk_evolution() - - -if __name__ == '__main__': - start = time.time() - main_run() - duration = time.time() - start - print('Full run took {}s'.format(round(duration, 1))) diff --git a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main4_individual_starting_years_impact.py b/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main4_individual_starting_years_impact.py deleted file mode 100644 index 9a38884fb86b7dc7c8668ca5aab79db8efdb7752..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/1 - non stationary model choice/main4_individual_starting_years_impact.py +++ /dev/null @@ -1,51 +0,0 @@ -import time - -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_year_hypercube_visualizer import \ - Altitude_Hypercube_Year_Visualizer -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two_parameters.gev_trend_test_two_parameters import GevLocationAndScaleTrendTest - -""" -Visualize the 0.99 quantile initial value and its evolution -""" -from experiment.exceeding_snow_loads.paper1_old import get_full_altitude_visualizer, FULL_ALTITUDES - - -def main_fast_spatial_risk_evolution(): - for altitude in [1800]: - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - reduce_strength_array=False, - trend_test_class=GevLocationAndScaleTrendTest, - offset_starting_year=20) - 
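
Editor's note: the deleted main4_* scripts in these hunks call get_full_altitude_visualizer with the same cluster of keyword arguments over and over (altitude, trend_test_class, reduce_strength_array, exact_starting_year, offset_starting_year). A hedged sketch of bundling them into one configuration object; VisualizerConfig and build_visualizer are hypothetical names, not the repository API, and the factory is assumed to accept these keywords:

from dataclasses import dataclass
from typing import Callable, Optional, Type


@dataclass
class VisualizerConfig:
    # Hypothetical bundle of the keyword arguments repeated across the deleted scripts.
    altitude: Optional[int] = None
    trend_test_class: Optional[Type] = None
    reduce_strength_array: bool = False
    exact_starting_year: Optional[int] = None
    offset_starting_year: Optional[int] = None


def build_visualizer(factory: Callable, config: VisualizerConfig):
    """Forward the bundled options to a visualizer factory (for example a
    partial application of get_full_altitude_visualizer, assumed to accept
    exactly these keyword arguments)."""
    return factory(altitude=config.altitude,
                   trend_test_class=config.trend_test_class,
                   reduce_strength_array=config.reduce_strength_array,
                   exact_starting_year=config.exact_starting_year,
                   offset_starting_year=config.offset_starting_year)
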
vizualiser.save_to_file = False - vizualiser.visualize_massif_trend_test_one_altitude() - vizualiser.reduce_strength_array = True - vizualiser.visualize_massif_trend_test_one_altitude() - - -def main_full_spatial_risk_evolution(): - # Compare the risk with and without taking into account the starting year - for altitude in FULL_ALTITUDES[-2:-1]: - for trend_test_class in [GevLocationAndScaleTrendTest]: - # vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - # exact_starting_year=1958, reduce_strength_array=True, - # trend_test_class=trend_test_class) - # vizualiser.visualize_massif_trend_test_one_altitude() - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - reduce_strength_array=True, - trend_test_class=trend_test_class, - offset_starting_year=20) - vizualiser.visualize_massif_trend_test_one_altitude() - vizualiser.reduce_strength_array = False - vizualiser.visualize_massif_trend_test_one_altitude() - - -def main_run(): - main_full_spatial_risk_evolution() - # main_fast_spatial_risk_evolution() - - -if __name__ == '__main__': - start = time.time() - main_run() - duration = time.time() - start - print('Full run took {}s'.format(round(duration, 1))) diff --git a/papers/exceeding_snow_loads/paper1_old/__init__.py b/papers/exceeding_snow_loads/paper1_old/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/__init__.py b/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/main_poster_EVAN2019.py b/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/main_poster_EVAN2019.py deleted file mode 100644 index df96d6bddc37d9210db063b91cd71b77cbec2e38..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/main_poster_EVAN2019.py +++ /dev/null @@ -1,138 +0,0 @@ -from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSwe3Days -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_year_hypercube_visualizer import \ - Altitude_Hypercube_Year_Visualizer, AltitudeHypercubeVisualizerWithoutTrendType -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - SCM_STUDIES -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter import GevScaleTrendTest, \ - GevLocationTrendTest -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two_parameters.gev_trend_test_two_parameters import GevLocationAndScaleTrendTest -from experiment.exceeding_snow_loads.paper1_old import get_full_altitude_visualizer - -POSTER_ALTITUDES = [900, 1800, 2700] -import matplotlib as mpl - -mpl.rcParams['hatch.linewidth'] = 0.3 - - -def main_poster_A_non_stationary_model_choice(): - nb = 1 - for altitude in POSTER_ALTITUDES[:]: - for trend_test_class in [GevLocationTrendTest, GevScaleTrendTest, GevLocationAndScaleTrendTest][-nb:]: - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958, reduce_strength_array=False, - trend_test_class=trend_test_class, - ) - # vizualiser.save_to_file = False - 
vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False) - - -def main_poster_B_starting_years_analysis(): - nb = 3 - for altitude in POSTER_ALTITUDES[:nb]: - for trend_test_class in [GevLocationAndScaleTrendTest]: - # 1958 as starting year - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958, reduce_strength_array=False, - trend_test_class=trend_test_class, - ) - for d in [True, False]: - vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False, - display_trend_color=d) - # vizualiser.save_to_file = False - - vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False) - # Optimal common starting year - vizualiser = get_full_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendType, altitude=altitude, - reduce_strength_array=True, - trend_test_class=trend_test_class, - offset_starting_year=20) - res = vizualiser.visualize_year_trend_test(subtitle_specified='CrocusSwe3Days') - best_year = res[0][1] - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=best_year, reduce_strength_array=False, - trend_test_class=trend_test_class) - for d in [True, False]: - vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False, - display_trend_color=d) - # Individual most likely starting year for each massif - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - reduce_strength_array=False, - trend_test_class=trend_test_class, - offset_starting_year=20) - for d in [True, False]: - vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False, - display_trend_color=d) - -# def main_poster_B_test(): -# nb = 3 -# for altitude in POSTER_ALTITUDES[:1]: -# for trend_test_class in [GevLocationAndScaleTrendTest]: -# # # 1958 as starting year -# vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, -# exact_starting_year=1958, reduce_strength_array=False, -# trend_test_class=trend_test_class, -# ) -# # vizualiser.save_to_file = False -# vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False, -# display_trend_color=False) -# vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False, -# display_trend_color=True) -# # # Optimal common starting year -# vizualiser = get_full_altitude_visualizer(AltitudeHypercubeVisualizerWithoutTrendType, altitude=altitude, -# reduce_strength_array=True, -# trend_test_class=trend_test_class, -# offset_starting_year=20) -# res = vizualiser.visualize_year_trend_test(subtitle_specified='CrocusSwe3Days') -# best_year = res[0][1] -# vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, -# exact_starting_year=best_year, reduce_strength_array=False, -# trend_test_class=trend_test_class) -# vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False, -# display_trend_color=False) -# vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False, -# display_trend_color=True) -# # vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False) -# # Individual most likely starting year for each massif -# # vizualiser = 
get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, -# # reduce_strength_array=False, -# # trend_test_class=trend_test_class, -# # offset_starting_year=50) -# # # vizualiser.save_to_file = False -# # vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=True, -# # display_trend_color=False) - - - -def main_poster_C_orientation_analysis(): - """By default the slope is equal to 20""" - nb = 0 - cardinal_orientations = [0.0, 90.0, 180.0, 270.0] - trend_test_class = GevLocationAndScaleTrendTest - for altitude in POSTER_ALTITUDES[nb:]: - study_class = CrocusSwe3Days - for orientation in cardinal_orientations[nb:]: - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958, reduce_strength_array=False, - trend_test_class=trend_test_class, - study_class=study_class, - orientation=orientation) - vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False) - - -def main_poster_D_other_quantities_analysis(): - nb = 3 - trend_test_class = GevLocationAndScaleTrendTest - for altitude in POSTER_ALTITUDES[:nb]: - for study_class in SCM_STUDIES[:nb]: - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958, reduce_strength_array=False, - trend_test_class=trend_test_class, - study_class=study_class) - vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False) - - -if __name__ == '__main__': - main_poster_A_non_stationary_model_choice() - # main_poster_B_starting_years_analysis() - # main_poster_C_orientation_analysis() - # main_poster_D_other_quantities_analysis() diff --git a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/__init__.py b/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/analyse_shape_from_some_experiment.py b/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/analyse_shape_from_some_experiment.py deleted file mode 100644 index 97c42f9a441b8b3abc1ee698ba13a6acee69f81b..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/analyse_shape_from_some_experiment.py +++ /dev/null @@ -1,10 +0,0 @@ -import matplotlib.pyplot as plt - -# Load shapes -lines = [] -with open('shape_from_some_experiment.txt') as f: - for l in f: - lines.append(float(l.split('\n')[0])) -# Build his -plt.hist(lines, bins=50, histtype='step') -plt.show() \ No newline at end of file diff --git a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/hist_values_shape.png b/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/hist_values_shape.png deleted file mode 100644 index 2aa54e3a4c6b777ae861ae7a2195f278cd138626..0000000000000000000000000000000000000000 Binary files a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/hist_values_shape.png and /dev/null differ diff --git a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/shape_from_some_experiment.txt b/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/shape_from_some_experiment.txt deleted file mode 100644 index 
813b089896d72ada17f7dff66af0991fc9151383..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/shape_from_some_experiment.txt +++ /dev/null @@ -1,396 +0,0 @@ --0.17986546997305747 --0.21512821907488117 --0.22839258514334876 --0.2498225571434785 --0.10453885381717745 --0.11536301350989701 --0.21098040403900203 --0.3050659290512925 --0.15925873857758652 --0.17268190177825643 --0.2594056844372453 --0.28039798550349143 --0.06722834840436546 --0.1479118601104086 --0.21433354559531342 --0.28211290009969725 --0.0887680562068616 --0.2208205997056717 --0.10494743088832467 --0.11452045521531878 -0.028945433687082932 -0.0067015122458568815 --0.1472801324090879 --0.22098011094150238 --0.0610051579422065 --0.09022142187635976 --0.13520802315098068 --0.12338337567622575 --0.011899752730240097 --0.05310318658946517 --0.06023136975812889 --0.06563016191691054 --0.03974170346172862 --0.03827310510219756 --0.21367056961008526 --0.20203181697856493 --0.21188535197847605 --0.1639189163167375 --0.10616033170445793 --0.10950723984817709 -0.13299031367466324 -0.13458707050406582 -0.07547256999354447 -0.07553864770823433 -0.23444800760973622 -0.23630698649068987 --0.17986546997305747 --0.12857827749906253 --0.22839258514334876 --0.23992665528817214 --0.10453885381717745 --0.09501113151618684 --0.21098040403900203 --0.267924138055065 --0.15925873857758652 --0.1632068762563936 --0.2594056844372453 --0.29430696329291695 --0.06722834840436546 --0.1723186485631052 --0.21433354559531342 --0.2820272223403669 --0.0887680562068616 --0.1998469561489447 --0.10494743088832467 --0.10687122711559201 -0.028945433687082932 --0.0035694057343077634 --0.1472801324090879 --0.16226667617602392 --0.0610051579422065 --0.1881631947332648 --0.13520802315098068 --0.18064106727025547 --0.011899752730240097 --0.044448559505585454 --0.06023136975812889 --0.0603700543349135 --0.03974170346172862 --0.046393073508512156 --0.21367056961008526 --0.18470833159640074 --0.21188535197847605 --0.2512896889755669 --0.10616033170445793 --0.11765252724444987 -0.13299031367466324 -0.11442126594429894 -0.07547256999354447 -0.06645974620355453 -0.23444800760973622 -0.23607921468817256 --0.17986546997305747 --0.11767421855242631 --0.22839258514334876 --0.24300684469910894 --0.10453885381717745 --0.03744201572720143 --0.21098040403900203 --0.24560126821798106 --0.15925873857758652 --0.16152378998653077 --0.2594056844372453 --0.3007158748567143 --0.06722834840436546 --0.15841519158251238 --0.21433354559531342 --0.2686522942568237 --0.0887680562068616 --0.14987108005772576 --0.10494743088832467 --0.11016932479851534 -0.028945433687082932 --0.018940487347995708 --0.1472801324090879 --0.15248545102652994 --0.0610051579422065 --0.21025410178814968 --0.13520802315098068 --0.17856344747026348 --0.011899752730240097 --0.04010152067388792 --0.06023136975812889 --0.060863919099542585 --0.03974170346172862 --0.04705695832001752 --0.21367056961008526 --0.18568613549947116 --0.21188535197847605 --0.22060391513735975 --0.10616033170445793 --0.12036731892022036 -0.13299031367466324 -0.11117383548536468 -0.07547256999354447 -0.07252132278308279 -0.23444800760973622 -0.23727953535405227 --0.07167408155700153 --0.07284068855576813 --0.09537461461592106 --0.09129636040505239 --0.03565240312207722 --0.03632498473754695 --0.03820593178143043 --0.07121744555334567 --0.04012032394334429 --0.0421795471212244 --0.38124963216329727 --0.4072920662869968 -0.13908474912174423 -0.31108640183679825 --0.06366732312024873 
--0.08142802214894124 --0.04043456397190185 --0.06209554218262815 --0.10306101124433735 --0.105061605902908 -0.26495290044739506 -0.2651693537670664 --0.0554182614697758 --0.12952225331810446 --0.006287103759081175 --0.023577383744064054 -0.09567628293191234 -0.19154502325337455 -0.14014740161102704 -0.09305836929837064 -0.06304171036103452 -0.06811512321802246 -0.24709760299667172 -0.24546539155981217 -0.16576276213650265 -0.16682452330147396 --0.04716342309438759 --0.050693380192965404 --0.018339224787634383 --0.04390430160195761 -0.034030477584533134 -0.03221808998045728 --0.006686894148799695 --0.003653548445140834 --0.0409527995366322 --0.04330533674810822 --0.07167408155700153 --0.07385322485385248 --0.09537461461592106 --0.08691089793940054 --0.03565240312207722 --0.035961500074552614 --0.03820593178143043 --0.04904968521387157 --0.04012032394334429 --0.040058422832121424 --0.38124963216329727 --0.4255775255857207 -0.13908474912174423 -0.32948472064054846 --0.06366732312024873 --0.06581014041501734 --0.04043456397190185 --0.0473115474051234 --0.10306101124433735 --0.1034503812140259 -0.26495290044739506 -0.26583070811307496 --0.0554182614697758 --0.10382805329318165 --0.006287103759081175 --0.06161039910393466 -0.09567628293191234 -0.1707671228679576 -0.14014740161102704 -0.10063236364861382 -0.06304171036103452 -0.07241723165747403 -0.24709760299667172 -0.2464524828762165 -0.16576276213650265 -0.16708560345285467 --0.04716342309438759 --0.04223688297742566 --0.018339224787634383 --0.05621326427186505 -0.034030477584533134 -0.015112983286377522 --0.006686894148799695 -0.0084135985045764 --0.0409527995366322 --0.041330879541588206 --0.07167408155700153 --0.07610750819868832 --0.09537461461592106 --0.08595044610907085 --0.03565240312207722 --0.036908752736647336 --0.03820593178143043 --0.06473281826001054 --0.04012032394334429 --0.03788220693764348 --0.38124963216329727 --0.42807544746430654 -0.13908474912174423 -0.3384350785635435 --0.06366732312024873 --0.0725640855386424 --0.04043456397190185 --0.05175116106078634 --0.10306101124433735 --0.10222160920574865 -0.26495290044739506 -0.2604726253803136 --0.0554182614697758 --0.1147454497634801 --0.006287103759081175 --0.08823807685369725 -0.09567628293191234 -0.18364291387325196 -0.14014740161102704 -0.05730792075979853 -0.06304171036103452 -0.07211510826670224 -0.24709760299667172 -0.24675719313983843 -0.16576276213650265 -0.1662832108850845 --0.04716342309438759 --0.036858800462481356 --0.018339224787634383 --0.06485242301330693 -0.034030477584533134 --0.001833316760152273 --0.006686894148799695 -0.008107799213101277 --0.0409527995366322 --0.039515631316688726 -0.023153980046429462 -0.028368437709736436 -0.02382835483828985 -0.05668510096794785 -0.05768326430840509 -0.07154526043214227 -0.030043102735118146 -0.04231645787397986 --0.17175820929152086 --0.14520180164338675 -0.0028562069807351372 -0.036598025744808194 --0.029649574367349434 --0.03599847625320862 --0.044957908239197805 --0.029960661741132214 -0.4561885125552172 -0.46062013837736504 --0.012661463769511036 --0.02273628741179249 -0.024635517950243817 -0.028985695188802826 -0.06861234003281175 -0.06896891203654665 -0.010549053211388493 -0.008453432608898706 -0.20904284330405942 -0.213052274459237 -0.07335130956548458 --0.013503329370868473 -0.08037111672081598 -0.07975178731239568 -0.022768386837852123 -0.014691330687326493 -0.18829249971095505 -0.1834474831546366 --0.09443055073415636 --0.11433649025507156 --0.017603453383083965 --0.02450581033427974 -0.023153980046429462 
-0.035206263268310826 -0.02382835483828985 -0.07921884186951642 -0.05768326430840509 -0.07108059383978341 -0.030043102735118146 -0.06285698052791111 --0.17175820929152086 --0.16311239604418018 -0.0028562069807351372 -0.09585120070226141 --0.029649574367349434 -0.008434149867197758 --0.044957908239197805 --0.005172810269527511 -0.4561885125552172 -0.4611735283254629 --0.012661463769511036 --0.014583956392028854 -0.024635517950243817 -0.016769887124387435 -0.06861234003281175 -0.022837477304427034 -0.010549053211388493 --0.02916284149819585 -0.20904284330405942 -0.17779321734729808 -0.07335130956548458 --0.00035516640950627006 -0.08037111672081598 -0.02335057573261063 -0.022768386837852123 -0.0025935572691968556 -0.18829249971095505 -0.1843362900913051 --0.09443055073415636 --0.12516161155424058 --0.017603453383083965 --0.03341544379480277 -0.023153980046429462 -0.03056795906937061 -0.02382835483828985 -0.06871705590566193 -0.05768326430840509 -0.06826999150314064 -0.030043102735118146 -0.06251686121956282 --0.17175820929152086 --0.1583926069310994 -0.0028562069807351372 -0.09549477544547744 --0.029649574367349434 -0.005458378484294713 --0.044957908239197805 --0.007512152372008501 -0.4561885125552172 -0.45922425158297886 --0.012661463769511036 --0.012099524333183728 -0.024635517950243817 -0.010166201478040926 -0.06861234003281175 -0.016880844841037745 -0.010549053211388493 --0.023541225896109897 -0.20904284330405942 -0.16934269602225305 -0.07335130956548458 -0.002218562767923835 -0.08037111672081598 -0.013182217830514917 -0.022768386837852123 --0.008410124971168734 -0.18829249971095505 -0.16467866697088315 --0.09443055073415636 --0.12436056994005414 --0.017603453383083965 --0.03351139880158266 \ No newline at end of file diff --git a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/some_experiment_EVAN.py b/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/some_experiment_EVAN.py deleted file mode 100644 index a6a982c37fde9c7c568c48417c4825ba7a5e21b0..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/poster_EVAN2019/shape_prior_check/some_experiment_EVAN.py +++ /dev/null @@ -1,32 +0,0 @@ -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_year_hypercube_visualizer import \ - Altitude_Hypercube_Year_Visualizer -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter.abstract_comparison_non_stationary_model import ComparisonAgainstMu, \ - ComparisonAgainstSigma -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter import GevScaleTrendTest, \ - GevLocationTrendTest -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two_parameters.gev_trend_test_two_parameters import GevLocationAndScaleTrendTest -from experiment.exceeding_snow_loads.paper1_old import get_full_altitude_visualizer - -POSTER_ALTITUDES = [900, 1800, 2700] -import matplotlib as mpl - -mpl.rcParams['hatch.linewidth'] = 0.3 - - -def main_non_stationary_model_comparison(): - stop_loop = False - for altitude in POSTER_ALTITUDES[:]: - for trend_test_class in [GevLocationTrendTest, GevScaleTrendTest, GevLocationAndScaleTrendTest, - ComparisonAgainstMu, ComparisonAgainstSigma][:]: - vizualiser = get_full_altitude_visualizer(Altitude_Hypercube_Year_Visualizer, altitude=altitude, - exact_starting_year=1958, reduce_strength_array=False, - trend_test_class=trend_test_class, - verbose=False) - # vizualiser.save_to_file = 
False - vizualiser.visualize_massif_trend_test_one_altitude(poster_plot=True, write_text_on_massif=False) - if stop_loop: - return - - -if __name__ == '__main__': - main_non_stationary_model_comparison() diff --git a/papers/exceeding_snow_loads/paper1_old/utils.py b/papers/exceeding_snow_loads/paper1_old/utils.py deleted file mode 100644 index 55caf1fee2060ab9f5a45bb8d183e00ac818e96b..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/utils.py +++ /dev/null @@ -1,30 +0,0 @@ -from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSwe3Days -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.main_files.main_full_hypercube import \ - get_full_parameters -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \ - load_altitude_visualizer -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter import GevLocationTrendTest - -FULL_ALTITUDES = [900, 1200, 1500, 1800, 2100, 2400, 2700, 3000] - - -def get_full_altitude_visualizer(altitude_hypercube_class, exact_starting_year=None, altitude=900, - reduce_strength_array=False, - trend_test_class = GevLocationTrendTest, - offset_starting_year=10, - study_class=CrocusSwe3Days, - orientation=None, - verbose=True): - altitudes, first_starting_year, last_starting_year, nb_data_reduced_for_speed, only_first_one, save_to_file, _ = get_full_parameters( - altitude=altitude, offset_starting_year=offset_starting_year) - if exact_starting_year is not None: - first_starting_year, last_starting_year = None, None - study_classes = [study_class] - visualizer = load_altitude_visualizer(altitude_hypercube_class, altitudes, last_starting_year, - nb_data_reduced_for_speed, only_first_one, save_to_file, study_classes, - trend_test_class, first_starting_year=first_starting_year, - exact_starting_year=exact_starting_year, - orientations=[orientation], - verbose=verbose) - visualizer.reduce_strength_array = reduce_strength_array - return visualizer diff --git a/papers/exceeding_snow_loads/paper1_old/validations/__init__.py b/papers/exceeding_snow_loads/paper1_old/validations/__init__.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/papers/exceeding_snow_loads/paper1_old/validations/main0_comparison_with_observations.py b/papers/exceeding_snow_loads/paper1_old/validations/main0_comparison_with_observations.py deleted file mode 100644 index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000 diff --git a/papers/exceeding_snow_loads/paper1_old/validations/main1_good_stationary_gev_fit.py b/papers/exceeding_snow_loads/paper1_old/validations/main1_good_stationary_gev_fit.py deleted file mode 100644 index f7b764b96784c6241079b5b43a695d6b12bce4ef..0000000000000000000000000000000000000000 --- a/papers/exceeding_snow_loads/paper1_old/validations/main1_good_stationary_gev_fit.py +++ /dev/null @@ -1,23 +0,0 @@ -from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSwe3Days -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - study_iterator_global -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ - StudyVisualizer - - -def maxima_analysis(): - save_to_file = False - only_first_one = False - durand_altitude = [900, 1500, 1800, 2100, 2700][2:-2] - 
altitudes = durand_altitude - study_classes = [CrocusSwe3Days][:] - for study in study_iterator_global(study_classes, only_first_one=only_first_one, altitudes=altitudes): - study_visualizer = StudyVisualizer(study, save_to_file=save_to_file, - verbose=True, - multiprocessing=True) - # study_visualizer.visualize_summary_of_annual_values_and_stationary_gev_fit() - study_visualizer.visualize_all_mean_and_max_graphs() - - -if __name__ == '__main__': - maxima_analysis() diff --git a/papers/exceeding_snow_loads/paper_utils.py b/papers/exceeding_snow_loads/paper_utils.py index 35e2c51921deab72ebb18c4d2cc303e409994f2e..d3c419e5208144cfed9f27e701cf55c2f200b719 100644 --- a/papers/exceeding_snow_loads/paper_utils.py +++ b/papers/exceeding_snow_loads/paper_utils.py @@ -2,7 +2,7 @@ from enum import Enum from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal, CrocusSnowLoadEurocode, \ CrocusSnowLoad3Days -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ ALL_ALTITUDES_WITHOUT_NAN from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter.gumbel_trend_test_one_parameter import \ GumbelVersusGumbel, GumbelLocationTrendTest, GumbelScaleTrendTest, GevStationaryVersusGumbel @@ -12,7 +12,6 @@ from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two GevLocationAgainstGumbel, GevScaleAgainstGumbel from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two_parameters.gumbel_test_two_parameters import \ GumbelLocationAndScaleTrendTest -from root_utils import get_display_name_from_object_type paper_altitudes = ALL_ALTITUDES_WITHOUT_NAN paper_study_classes = [CrocusSnowLoadTotal, CrocusSnowLoadEurocode, CrocusSnowLoad3Days][:2] diff --git a/papers/exceeding_snow_loads/result_trends_and_return_levels/plot_uncertainty_curves.py b/papers/exceeding_snow_loads/result_trends_and_return_levels/plot_uncertainty_curves.py index 87b8325e4e9938244c3a5816594802150f54e04e..e68a89384931514b7532b5711c0d4bd74320f9a2 100644 --- a/papers/exceeding_snow_loads/result_trends_and_return_levels/plot_uncertainty_curves.py +++ b/papers/exceeding_snow_loads/result_trends_and_return_levels/plot_uncertainty_curves.py @@ -5,10 +5,7 @@ import numpy as np from experiment.eurocode_data.utils import EUROCODE_RETURN_LEVEL_STR, EUROCODE_ALTITUDES, \ YEAR_OF_INTEREST_FOR_RETURN_LEVEL -from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy, filled_marker_legend_list, \ - filled_marker_legend_list2 -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - SCM_STUDY_CLASS_TO_ABBREVIATION +from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy, filled_marker_legend_list2 from papers.exceeding_snow_loads.paper_utils import dpi_paper1_figure, ModelSubsetForUncertainty from papers.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import \ StudyVisualizerForNonStationaryTrends @@ -16,8 +13,7 @@ from extreme_fit.model.result_from_model_fit.result_from_extremes.abstract_extra AbstractExtractEurocodeReturnLevel from experiment.eurocode_data.massif_name_to_departement import massif_name_to_eurocode_region from experiment.meteo_france_data.scm_models_data.visualization.utils import create_adjusted_axes -from 
extreme_fit.model.result_from_model_fit.result_from_extremes.confidence_interval_method import ci_method_to_color, \ - ConfidenceIntervalMethodFromExtremes +from extreme_fit.model.result_from_model_fit.result_from_extremes.confidence_interval_method import ci_method_to_color from root_utils import get_display_name_from_object_type diff --git a/papers/exceeding_snow_loads/study_visualizer_for_non_stationary_trends.py b/papers/exceeding_snow_loads/study_visualizer_for_non_stationary_trends.py index 68b8b75c67c6255d376c8f0b10d3b0262b11ed00..0bd3c014d3198c7b8ac0e8d5e731e63d37b6b3df 100644 --- a/papers/exceeding_snow_loads/study_visualizer_for_non_stationary_trends.py +++ b/papers/exceeding_snow_loads/study_visualizer_for_non_stationary_trends.py @@ -10,10 +10,10 @@ from experiment.eurocode_data.eurocode_region import C2, C1, E from experiment.eurocode_data.massif_name_to_departement import massif_name_to_eurocode_region from experiment.eurocode_data.utils import EUROCODE_QUANTILE, EUROCODE_RETURN_LEVEL_STR, \ YEAR_OF_INTEREST_FOR_RETURN_LEVEL -from experiment.meteo_france_data.plot.create_shifted_cmap import get_shifted_map, get_colors +from experiment.meteo_france_data.scm_models_data.visualization.create_shifted_cmap import get_shifted_map, get_colors from experiment.meteo_france_data.scm_models_data.abstract_extended_study import AbstractExtendedStudy from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer from papers.exceeding_snow_loads.check_mcmc_convergence_for_return_levels.gelman_convergence_test import \ compute_gelman_convergence_value @@ -22,10 +22,6 @@ from experiment.trend_analysis.abstract_score import MeanScore from experiment.trend_analysis.univariate_test.extreme_trend_test.abstract_gev_trend_test import AbstractGevTrendTest from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter.gumbel_trend_test_one_parameter import \ GumbelLocationTrendTest, GevStationaryVersusGumbel, GumbelScaleTrendTest, GumbelVersusGumbel -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_three_parameters.gev_trend_test_three_parameters import \ - GevLocationAndScaleTrendTestAgainstGumbel -from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two_parameters.gev_trend_test_two_parameters import \ - GevLocationAgainstGumbel, GevScaleAgainstGumbel from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_two_parameters.gumbel_test_two_parameters import \ GumbelLocationAndScaleTrendTest from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \ diff --git a/papers/projection_snow_load/main_difference_between_reanalysis_and_simulations.py b/papers/projection_snow_load/main_difference_between_reanalysis_and_simulations.py index 220143476743f42d53f3659ed7625d00db779a2f..6acff7cecf4608cc6d65e2ad0c350414f4fa715a 100644 --- a/papers/projection_snow_load/main_difference_between_reanalysis_and_simulations.py +++ b/papers/projection_snow_load/main_difference_between_reanalysis_and_simulations.py @@ -1,7 +1,7 @@ from experiment.meteo_france_data.adamont_data.ensemble_simulation import EnsembleSimulation from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoad3Days, \ CrocusSweTotal -from 
experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer import matplotlib.pyplot as plt diff --git a/test/test_experiment/test_SCM_study.py b/test/test_experiment/test_SCM_study.py index e7c14277054d7e926c18caa18a15c58061878c0d..c17c2e6ebe0c0456592d9b40dd48528941bf15fd 100644 --- a/test/test_experiment/test_SCM_study.py +++ b/test/test_experiment/test_SCM_study.py @@ -4,13 +4,13 @@ from random import sample import pandas as pd -from experiment.meteo_france_data.scm_models_data.cumulated_study import NB_DAYS +from experiment.meteo_france_data.scm_models_data.safran.cumulated_study import NB_DAYS from experiment.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall, ExtendedSafranSnowfall, \ SafranTemperature, \ SafranPrecipitation -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ study_iterator, study_iterator_global, SCM_STUDIES, ALL_ALTITUDES -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer from experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter.gev_trend_test_one_parameter import \ GevLocationTrendTest diff --git a/test/test_experiment/test_coordinate_sensitivity.py b/test/test_experiment/test_coordinate_sensitivity.py index f0eb3a4b1856510e7ba81ac1068c39ae6eb6fafd..d66c1713d80119bb07e83e4f4dfbdd118c5ed5d3 100644 --- a/test/test_experiment/test_coordinate_sensitivity.py +++ b/test/test_experiment/test_coordinate_sensitivity.py @@ -1,9 +1,9 @@ import unittest from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSweTotal -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \ study_iterator_global -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ +from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \ StudyVisualizer from experiment.trend_analysis.non_stationary_trends import \ ConditionalIndedendenceLocationTrendTest diff --git a/test/test_experiment/test_hypercube.py b/test/test_experiment/test_hypercube.py deleted file mode 100644 index 80446d443f02fb818671d13305bda286aa26361e..0000000000000000000000000000000000000000 --- a/test/test_experiment/test_hypercube.py +++ /dev/null @@ -1,56 +0,0 @@ -import unittest -from collections import OrderedDict - -import numpy as np - -from experiment.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall -from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_year_hypercube_visualizer import \ - Altitude_Hypercube_Year_Visualizer -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \ - study_iterator -from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \ - StudyVisualizer -from 
experiment.trend_analysis.univariate_test.extreme_trend_test.trend_test_one_parameter.gev_trend_test_one_parameter import \ - GevLocationTrendTest -from extreme_fit.model.utils import set_seed_for_test - - -class TestHypercube(unittest.TestCase): - DISPLAY = False - - def setUp(self) -> None: - set_seed_for_test(42) - altitudes = [900, 3000] - - visualizers = [StudyVisualizer(study, temporal_non_stationarity=True, verbose=False, multiprocessing=True) - for study in study_iterator(study_class=SafranSnowfall, only_first_one=False, - altitudes=altitudes, verbose=self.DISPLAY)] - self.altitude_to_visualizer = OrderedDict(zip(altitudes, visualizers)) - self.trend_test_class = GevLocationTrendTest - self.nb_data_reduced_for_speed = 4 - - # def test_altitude_hypercube_visualizer(self): - # visualizer = AltitudeHypercubeVisualizer(self.altitude_to_visualizer, save_to_file=False, - # trend_test_class=self.trend_test_class, - # nb_data_reduced_for_speed=self.nb_data_reduced_for_speed, - # verbose=self.DISPLAY) - # self.df = visualizer.df_hypercube_trend_type - - def test_year_altitude_hypercube_visualizer(self): - visualizer = Altitude_Hypercube_Year_Visualizer(self.altitude_to_visualizer, save_to_file=False, - trend_test_class=self.trend_test_class, - nb_data_reduced_for_speed=self.nb_data_reduced_for_speed, - verbose=self.DISPLAY) - self.df = visualizer.df_hypercube_trend_type - - def tearDown(self) -> None: - if self.DISPLAY: - print(self.df) - # Check that all the rows contain - nb_non_nan_values_per_row = (~self.df.isnull()).sum(axis=1) - equality = nb_non_nan_values_per_row.values == np.ones(len(self.df)) - self.assertTrue(equality.all()) - - -if __name__ == '__main__': - unittest.main() diff --git a/experiment/meteo_france_data/plot/__init__.py b/test/test_extreme_fit/test_function/__init__.py similarity index 100% rename from experiment/meteo_france_data/plot/__init__.py rename to test/test_extreme_fit/test_function/__init__.py diff --git a/test/test_extreme_fit/test_model/test_margin_function.py b/test/test_extreme_fit/test_function/test_margin_function.py similarity index 100% rename from test/test_extreme_fit/test_model/test_margin_function.py rename to test/test_extreme_fit/test_function/test_margin_function.py diff --git a/thesis_report/slides.py b/thesis_report/slides.py deleted file mode 100644 index f6aed506e251817bf36e4f28cc63e1d494e6b5d0..0000000000000000000000000000000000000000 --- a/thesis_report/slides.py +++ /dev/null @@ -1,41 +0,0 @@ -import numpy as np -import matplotlib.pyplot as plt - -from extreme_fit.model.utils import r, set_seed_r - - -def snowfall_plot(flip=False): - set_seed_r(seed=21) - mean, sd = 200, 50 - lim = 200 - x = np.linspace(max(0, mean-lim), mean+lim, 1000) - y = r.dnorm(x, mean=mean, sd=sd) - - if flip: - plt.plot(y, x) - else: - plt.plot(x, y) - plt.legend() - plt.xlabel('snowfall S in mm') - plt.ylabel('P(S)') - - level_to_color = {0.99: 'r', - 0.5: 'g'} - for level, color in level_to_color.items(): - quantile = r.qnorm(p=level, mean=mean, sd=sd) - print(level, color, quantile) - if flip: - plt.plot(r.dnorm(quantile, mean=mean, sd=sd), quantile, color + 'o') - else: - plt.plot(quantile, r.dnorm(quantile, mean=mean, sd=sd), color + 'o') - - # Place the sample - if not flip: - n = 50 - plt.plot(r.rnorm(n=n, mean=mean, sd=sd), np.zeros(n), 'ko') - - plt.show() - - -if __name__ == '__main__': - snowfall_plot(flip=True)
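For code outside this diff that still imports the old module layout, here is a minimal before/after sketch of the import paths touched above. The module paths are copied verbatim from the updated hunks; the imported names (NB_DAYS, StudyVisualizer, SCM_STUDIES, get_shifted_map) are just examples taken from those hunks, not an exhaustive list.

# Old layout (modules removed or moved by this diff)
# from experiment.meteo_france_data.scm_models_data.cumulated_study import NB_DAYS
# from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import StudyVisualizer
# from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import SCM_STUDIES
# from experiment.meteo_france_data.plot.create_shifted_cmap import get_shifted_map

# New layout (as used by the updated import statements in this diff)
from experiment.meteo_france_data.scm_models_data.safran.cumulated_study import NB_DAYS
from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import StudyVisualizer
from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import SCM_STUDIES
from experiment.meteo_france_data.scm_models_data.visualization.create_shifted_cmap import get_shifted_map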