diff --git a/experiment/eurocode_data/departement_alpes_francaises.py b/experiment/eurocode_data/departement_alpes_francaises.py
index 79d005be2070a455a89e8bf4587d7998acd36edc..e6028a8c646952796000b00283e6a695d74ba69a 100644
--- a/experiment/eurocode_data/departement_alpes_francaises.py
+++ b/experiment/eurocode_data/departement_alpes_francaises.py
@@ -1,12 +1,20 @@
 from enum import Enum
 
 from experiment.eurocode_data.eurocode_region import AbstractEurocodeRegion, E, C2, C1
+from root_utils import get_display_name_from_object_type
 
 
 class AbstractDepartementAlpesFrancaises(object):
 
-    def __init__(self, region: type):
-        self.region = region()  # type: AbstractEurocodeRegion
+    def __init__(self, eurocode_region: type):
+        self.eurocode_region = eurocode_region()  # type: AbstractEurocodeRegion
+
+    def display_limit(self, ax):
+        pass
+
+    def __str__(self):
+        return get_display_name_from_object_type(type(self)) + ' ({} Eurocode region)'.format(
+            get_display_name_from_object_type(type(self.eurocode_region)))
 
 
 class HauteSavoie(AbstractDepartementAlpesFrancaises):
@@ -32,6 +40,7 @@ class Drome(AbstractDepartementAlpesFrancaises):
     def __init__(self):
         super().__init__(C2)
 
+
 class HautesAlpes(AbstractDepartementAlpesFrancaises):
 
     def __init__(self):
@@ -48,6 +57,3 @@ class AlpesDeHauteProvence(AbstractDepartementAlpesFrancaises):
 
     def __init__(self):
         super().__init__(C1)
-
-
-
diff --git a/experiment/eurocode_data/eurocode_drawing.py b/experiment/eurocode_data/eurocode_drawing.py
new file mode 100644
index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391
diff --git a/experiment/eurocode_data/eurocode_region.py b/experiment/eurocode_data/eurocode_region.py
index 2b70fdd60f7cbae714bc54407d5965c00a7670dc..722cc33dbc60ebbfca20f3aaa6f78a451d113a51 100644
--- a/experiment/eurocode_data/eurocode_region.py
+++ b/experiment/eurocode_data/eurocode_region.py
@@ -40,6 +40,10 @@ class AbstractEurocodeRegion(object):
     def lois_de_variation_1000_and_2000(self):
         return 3.5, -2.45
 
+    def plot_max_loading(self, ax, altitudes):
+        ax.plot(altitudes, [self.eurocode_max_loading(altitude) for altitude in altitudes],
+                label='Eurocode limit')
+
 
 class C1(AbstractEurocodeRegion):
 
diff --git a/experiment/eurocode_data/eurocode_return_level_uncertainties.py b/experiment/eurocode_data/eurocode_return_level_uncertainties.py
new file mode 100644
index 0000000000000000000000000000000000000000..1e416a8a78583d35b7887833ea1f96a9b734ed1e
--- /dev/null
+++ b/experiment/eurocode_data/eurocode_return_level_uncertainties.py
@@ -0,0 +1,79 @@
+from typing import List
+
+
+import numpy as np
+from cached_property import cached_property
+
+from experiment.eurocode_data.utils import EUROCODE_QUANTILE
+from experiment.trend_analysis.univariate_test.utils import load_temporal_coordinates_and_dataset, \
+    fitted_linear_margin_estimator
+from extreme_fit.distribution.gev.gev_params import GevParams
+from extreme_fit.estimator.abstract_estimator import AbstractEstimator
+from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator
+from extreme_fit.estimator.utils import load_margin_function
+from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \
+    AbstractTemporalLinearMarginModel
+from extreme_fit.model.margin_model.margin_function.linear_margin_function import LinearMarginFunction
+from extreme_fit.model.result_from_model_fit.result_from_extremes import ResultFromExtremes
+
+
+class ExtractEurocodeReturnLevelFromExtremes(object):
+
+    def __init__(self, estimator: LinearMarginEstimator, year_of_interest: int = 2017):
+        self.estimator = estimator
+        self.result_from_fit = self.estimator.result_from_model_fit  # type: ResultFromExtremes
+        self.year_of_interest = year_of_interest
+
+    @property
+    def margin_functions_from_fit(self) -> List[LinearMarginFunction]:
+        margin_functions = []
+        for _, s in self.result_from_fit.df_posterior_samples.iterrows():
+            coef_dict = self.result_from_fit.get_coef_dict_from_posterior_sample(s)
+            margin_function = load_margin_function(self.estimator, self.estimator.margin_model, coef_dict=coef_dict)
+            margin_functions.append(margin_function)
+        return margin_functions
+
+    @property
+    def gev_params_from_fit_for_year_of_interest(self) -> List[GevParams]:
+        return [m.get_gev_params(coordinate=np.array([self.year_of_interest]), is_transformed=False)
+                for m in self.margin_functions_from_fit]
+
+    @cached_property
+    def posterior_eurocode_return_level_samples_for_year_of_interest(self) -> np.ndarray:
+        """We divide by 100 to transform the snow water equivalent into snow load"""
+        return np.array([p.quantile(EUROCODE_QUANTILE) for p in self.gev_params_from_fit_for_year_of_interest]) / 100
+
+    @property
+    def posterior_mean_eurocode_return_level_for_the_year_of_interest(self) -> float:
+        return np.mean(self.posterior_eurocode_return_level_samples_for_year_of_interest)
+
+    @property
+    def posterior_eurocode_return_level_uncertainty_interval_for_the_year_of_interest(self):
+        lower_and_upper_quantiles = (0.05, 0.95)
+        return [np.quantile(self.posterior_eurocode_return_level_samples_for_year_of_interest, q=q)
+                for q in lower_and_upper_quantiles]
+
+
+class EurocodeLevelUncertaintyFromExtremes(object):
+
+    YEAR_OF_INTEREST = 2017
+
+    def __init__(self, posterior_mean, posterior_uncertainty_interval):
+        self.posterior_mean = posterior_mean
+        self.posterior_uncertainty_interval = posterior_uncertainty_interval
+
+    @classmethod
+    def from_estimator_extremes(cls, estimator_extremes: LinearMarginEstimator):
+        extractor = ExtractEurocodeReturnLevelFromExtremes(estimator_extremes, cls.YEAR_OF_INTEREST)
+        return cls(extractor.posterior_mean_eurocode_return_level_for_the_year_of_interest,
+                   extractor.posterior_eurocode_return_level_uncertainty_interval_for_the_year_of_interest)
+
+    @classmethod
+    def from_maxima_years_model_class(cls, maxima, years, model_class):
+        # Load coordinates and dataset
+        coordinates, dataset = load_temporal_coordinates_and_dataset(maxima, years)
+        # Fitted estimator
+        fitted_estimator = fitted_linear_margin_estimator(model_class, coordinates, dataset, starting_year=1958,
+                                                          fit_method=AbstractTemporalLinearMarginModel.EXTREMES_FEVD_BAYESIAN_FIT_METHOD_STR)
+        # Load object from result from extremes
+        return cls.from_estimator_extremes(fitted_estimator)
\ No newline at end of file
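
The new ExtractEurocodeReturnLevelFromExtremes class summarizes the Bayesian fit by taking, over the posterior return-level samples, the mean and the (0.05, 0.95) quantiles. Below is a minimal numpy sketch of that summary step; the samples here are simulated placeholders, whereas the real ones are derived from the fitted GEV margin functions.

import numpy as np

# Hypothetical posterior samples of the 0.98 quantile (50-year return level),
# expressed in snow water equivalent; the real samples come from the fitted GEV margins.
rng = np.random.default_rng(0)
posterior_swe_samples = rng.gamma(shape=5.0, scale=30.0, size=1000)

# Divide by 100 to transform the snow water equivalent into snow load,
# as in posterior_eurocode_return_level_samples_for_year_of_interest.
posterior_load_samples = posterior_swe_samples / 100

posterior_mean = np.mean(posterior_load_samples)
# 90% credible interval, i.e. the (0.05, 0.95) posterior quantiles.
lower, upper = [np.quantile(posterior_load_samples, q=q) for q in (0.05, 0.95)]
print(posterior_mean, (lower, upper))
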
diff --git a/experiment/eurocode_data/eurocode_visualizer.py b/experiment/eurocode_data/eurocode_visualizer.py
index 387a8042aa53b71b49f275060a7088cc4272d294..b5231235247479290a877397e1889394760561b6 100644
--- a/experiment/eurocode_data/eurocode_visualizer.py
+++ b/experiment/eurocode_data/eurocode_visualizer.py
@@ -1,31 +1,57 @@
-from collections import OrderedDict
+from typing import Dict, List
+
 import matplotlib.pyplot as plt
 
-from experiment.eurocode_data.massif_name_to_departement import massif_name_to_departement_objects
-from experiment.eurocode_data.eurocode_region import AbstractEurocodeRegion
-from utils import get_display_name_from_object_type
-
-
-def display_region_limit(region_type, altitudes, ordered_massif_name_to_quantiles,
-                         ordered_massif_name_to_significances=None,
-                         display=True):
-    assert isinstance(ordered_massif_name_to_quantiles, OrderedDict)
-    assert ordered_massif_name_to_significances is None or isinstance(ordered_massif_name_to_significances, OrderedDict)
-    # First, select massif name correspond to the region
-    massif_name_belong_to_the_region = []
-    for massif_name in ordered_massif_name_to_quantiles.keys():
-        if any([isinstance(dep.region, region_type) for dep in massif_name_to_departement_objects[massif_name]]):
-            massif_name_belong_to_the_region.append(massif_name)
-    region_object = region_type()  # type: AbstractEurocodeRegion
-    # Then, display the limit for the region
-    fig, ax = plt.subplots(1, 1)
-    ax.plot(altitudes, [region_object.eurocode_max_loading(altitude) for altitude in altitudes], label='Eurocode limit')
-    # Finally, display the massif curve
-    for massif_name in massif_name_belong_to_the_region:
-        ax.plot(altitudes, ordered_massif_name_to_quantiles[massif_name], label=massif_name)
-    ax.set_title('{} Eurocode region'.format(get_display_name_from_object_type(region_type)))
-    ax.set_xlabel('Altitude')
-    ax.set_ylabel('0.98 quantile (in N $m^-2$)')
-    ax.legend()
-    if display:
+from experiment.eurocode_data.eurocode_return_level_uncertainties import EurocodeLevelUncertaintyFromExtremes
+from experiment.eurocode_data.massif_name_to_departement import DEPARTEMENT_TYPES
+from experiment.eurocode_data.utils import EUROCODE_QUANTILE, EUROCODE_ALTITUDES
+from experiment.meteo_france_data.scm_models_data.visualization.utils import create_adjusted_axes
+
+
+def plot_model_name_to_dep_to_ordered_return_level_uncertainties(
+        dep_to_model_name_to_ordered_return_level_uncertainties, show=True):
+    # Create a 3 x 3 grid of axes (7 panels are kept, one per departement)
+    axes = create_adjusted_axes(3, 3)
+    axes = list(axes.flatten())
+    ax6 = axes[5]
+    ax9 = axes[8]
+
+    axes.remove(ax6)
+    axes.remove(ax9)
+    ax_to_departement = dict(zip(axes, DEPARTEMENT_TYPES[::-1]))
+    for ax, departement in ax_to_departement.items():
+        plot_dep_to_model_name_dep_to_ordered_return_level_uncertainties(
+            ax, departement,
+            dep_to_model_name_to_ordered_return_level_uncertainties[departement],
+        )
+    ax6.remove()
+    ax9.remove()
+
+    plt.suptitle('50-year return levels for all French Alps departements.\n'
+                 'Comparison between the Eurocode maximum loading in the departement\n'
+                 'and the maximum return level among the massifs belonging to the departement')
+    if show:
         plt.show()
+
+
+def plot_dep_to_model_name_dep_to_ordered_return_level_uncertainties(
+        ax, dep_class,
+        model_name_to_ordered_return_level_uncertainties:
+        Dict[str, List[EurocodeLevelUncertaintyFromExtremes]]):
+    colors = ['red', 'blue', 'green']
+    altitudes = EUROCODE_ALTITUDES
+    alpha = 0.2
+    # Display the EUROCODE return level
+    dep_object = dep_class()
+    dep_object.eurocode_region.plot_max_loading(ax, altitudes=altitudes)
+    # Display the return level from model class
+    for color, (model_name, ordered_return_level_uncertainties) in \
+            zip(colors, model_name_to_ordered_return_level_uncertainties.items()):
+        mean = [r.posterior_mean for r in ordered_return_level_uncertainties]
+        ax.plot(altitudes, mean, '-', color=color)
+        lower_bound = [r.posterior_uncertainty_interval[0] for r in ordered_return_level_uncertainties]
+        upper_bound = [r.posterior_uncertainty_interval[1] for r in ordered_return_level_uncertainties]
+        ax.fill_between(altitudes, lower_bound, upper_bound, color=color, alpha=alpha)
+    ax.set_title(str(dep_object))
+    ax.set_ylabel('Maximum {} quantile (in N $m^{{-2}}$)'.format(EUROCODE_QUANTILE))
+    ax.set_xlabel('Altitude')
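
For each departement panel, the rewritten visualizer plots the Eurocode limit, then one posterior-mean curve per model with a shaded credible band. A minimal matplotlib sketch of the band-plotting pattern, with hypothetical altitudes and bounds standing in for the EurocodeLevelUncertaintyFromExtremes values:

import matplotlib.pyplot as plt

altitudes = [900, 1200]       # EUROCODE_ALTITUDES used in the diff
mean = [1.1, 1.6]             # hypothetical posterior means
lower_bound = [0.9, 1.3]      # hypothetical 0.05 posterior quantiles
upper_bound = [1.4, 2.0]      # hypothetical 0.95 posterior quantiles

fig, ax = plt.subplots(1, 1)
ax.plot(altitudes, mean, '-', color='red', label='posterior mean')
# Shade the uncertainty band between the lower and upper posterior quantiles.
ax.fill_between(altitudes, lower_bound, upper_bound, color='red', alpha=0.2)
ax.set_xlabel('Altitude')
ax.set_ylabel('Maximum 0.98 quantile (in N $m^{-2}$)')
ax.legend()
plt.show()
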
diff --git a/experiment/eurocode_data/main_eurocode_drawing.py b/experiment/eurocode_data/main_eurocode_drawing.py
new file mode 100644
index 0000000000000000000000000000000000000000..569cb9a843054e468cd286457782844afebb7b98
--- /dev/null
+++ b/experiment/eurocode_data/main_eurocode_drawing.py
@@ -0,0 +1,63 @@
+from collections import OrderedDict
+
+from experiment.eurocode_data.eurocode_visualizer import plot_model_name_to_dep_to_ordered_return_level_uncertainties
+from experiment.eurocode_data.massif_name_to_departement import DEPARTEMENT_TYPES
+from experiment.eurocode_data.utils import EUROCODE_ALTITUDES
+from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSwe3Days
+from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \
+    AltitudeHypercubeVisualizer
+from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \
+    load_altitude_visualizer
+from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_models import StationaryStationModel, \
+    NonStationaryLocationAndScaleModel
+from root_utils import get_display_name_from_object_type
+
+
+# Model class
+
+
+def dep_to_ordered_return_level_uncertainties(model_class, last_year_for_the_data):
+    model_name = get_display_name_from_object_type(model_class) + ' ' + str(last_year_for_the_data)
+    # Load altitude visualizer
+    # todo: add a last-year attribute that makes it possible to change the years used
+    altitude_visualizer = load_altitude_visualizer(AltitudeHypercubeVisualizer, altitudes=EUROCODE_ALTITUDES,
+                                                   last_starting_year=None, nb_data_reduced_for_speed=False,
+                                                   only_first_one=False, save_to_file=False,
+                                                   exact_starting_year=1958,
+                                                   first_starting_year=None,
+                                                   study_classes=[CrocusSwe3Days],
+                                                   trend_test_class=None)
+    # Loop on the data
+    assert isinstance(altitude_visualizer.tuple_to_study_visualizer, OrderedDict)
+    dep_to_ordered_return_level_uncertainty = {dep: [] for dep in DEPARTEMENT_TYPES}
+    for visualizer in altitude_visualizer.tuple_to_study_visualizer.values():
+        dep_to_return_level_uncertainty = visualizer.dep_class_to_eurocode_level_uncertainty(model_class)
+        for dep, return_level_uncertainty in dep_to_return_level_uncertainty.items():
+            dep_to_ordered_return_level_uncertainty[dep].append(return_level_uncertainty)
+
+    return {model_name: dep_to_ordered_return_level_uncertainty}
+
+
+def main_drawing():
+    model_class_and_last_year = [
+        (StationaryStationModel, 1991),
+        (StationaryStationModel, 2017),
+        (NonStationaryLocationAndScaleModel, 2017),
+    ][:1]
+    model_name_to_dep_to_ordered_return_level = {}
+    for model_class, last_year_for_the_data in model_class_and_last_year:
+        model_name_to_dep_to_ordered_return_level.update(
+            dep_to_ordered_return_level_uncertainties(model_class, last_year_for_the_data))
+    # Transform the dictionary into the desired format
+    dep_to_model_name_to_ordered_return_level_uncertainties = {}
+    for dep in DEPARTEMENT_TYPES:
+        d2 = {model_name: dep_to_level[dep]
+              for model_name, dep_to_level in model_name_to_dep_to_ordered_return_level.items()}
+        dep_to_model_name_to_ordered_return_level_uncertainties[dep] = d2
+    # Plot graph
+    plot_model_name_to_dep_to_ordered_return_level_uncertainties(
+        dep_to_model_name_to_ordered_return_level_uncertainties, show=True)
+
+
+if __name__ == '__main__':
+    main_drawing()
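
main_drawing first collects results keyed by model name, then pivots them so that each departement maps to its per-model curves before plotting. The pivot step in isolation, with strings standing in for the departement classes and plain numbers for the uncertainty objects:

model_name_to_dep_to_values = {
    'StationaryStationModel 1991': {'HauteSavoie': [1.1, 1.6], 'Savoie': [1.0, 1.4]},
    'StationaryStationModel 2017': {'HauteSavoie': [1.2, 1.7], 'Savoie': [1.1, 1.5]},
}
# Swap the two outer levels of nesting: dep -> model_name -> values.
dep_to_model_name_to_values = {
    dep: {model_name: dep_to_values[dep]
          for model_name, dep_to_values in model_name_to_dep_to_values.items()}
    for dep in ['HauteSavoie', 'Savoie']
}
print(dep_to_model_name_to_values)
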
diff --git a/experiment/eurocode_data/massif_name_to_departement.py b/experiment/eurocode_data/massif_name_to_departement.py
index 2e585248588ae06422227f0d533c937862e7b7d2..d059be32ebd84f6566f1d8fb0130215aaa64f1e7 100644
--- a/experiment/eurocode_data/massif_name_to_departement.py
+++ b/experiment/eurocode_data/massif_name_to_departement.py
@@ -3,8 +3,6 @@ from typing import Dict, List
 from experiment.eurocode_data.departement_alpes_francaises import HauteSavoie, Savoie, Isere, Drome, HautesAlpes, \
     AlpesDeHauteProvence, AlpesMaritimes, AbstractDepartementAlpesFrancaises
 
-
-
 massif_name_to_departement_types = {
     'Chablais': [HauteSavoie],
     'Aravis': [HauteSavoie, Savoie],
@@ -30,17 +28,15 @@ massif_name_to_departement_types = {
     'Haut_Var-Haut_Verdon': [AlpesDeHauteProvence],
     'Mercantour': [AlpesMaritimes, AlpesDeHauteProvence]}
 
-massif_name_to_departement_objects = {m: [d() for d in deps] for m, deps in massif_name_to_departement_types.items()}  # type: Dict[str, List[AbstractDepartementAlpesFrancaises]]
+massif_name_to_departement_objects = {m: [d() for d in deps] for m, deps in
+                                      massif_name_to_departement_types.items()}  # type: Dict[str, List[AbstractDepartementAlpesFrancaises]]
 
 DEPARTEMENT_TYPES = [HauteSavoie, Savoie, Isere, Drome, HautesAlpes, AlpesMaritimes, AlpesDeHauteProvence]
 
-departement_type_to_massif_names = {dep: [k for k, v in massif_name_to_departement_types.items() if dep in v]
-    for dep in DEPARTEMENT_TYPES
-}
+dep_class_to_massif_names = {dep: [k for k, v in massif_name_to_departement_types.items() if dep in v]
+                             for dep in DEPARTEMENT_TYPES
+                             }
 
 if __name__ == '__main__':
-    for k, v in departement_type_to_massif_names.items():
+    for k, v in dep_class_to_massif_names.items():
         print(k, v)
-
-
-
diff --git a/experiment/eurocode_data/utils.py b/experiment/eurocode_data/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..ef3b88eb415ca5b23223fce8be45c9a73636e9db
--- /dev/null
+++ b/experiment/eurocode_data/utils.py
@@ -0,0 +1,5 @@
+
+# The Eurocode quantile corresponds to a 50-year return period
+EUROCODE_QUANTILE = 0.98
+# Altitudes of interest (low to mid altitudes, below 2000 m)
+EUROCODE_ALTITUDES = [900, 1200, 1500, 1800][:2]
\ No newline at end of file
diff --git a/experiment/meteo_france_data/scm_models_data/abstract_extended_study.py b/experiment/meteo_france_data/scm_models_data/abstract_extended_study.py
index b832fe110e54aa817fbabdbf8b07149cb23ce3ec..d9c8c9e5cea9d75d825eff14eb43d7fb338ed0b8 100644
--- a/experiment/meteo_france_data/scm_models_data/abstract_extended_study.py
+++ b/experiment/meteo_france_data/scm_models_data/abstract_extended_study.py
@@ -4,7 +4,7 @@ from collections import OrderedDict
 from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy
 from spatio_temporal_dataset.coordinates.spatial_coordinates.abstract_spatial_coordinates import \
     AbstractSpatialCoordinates
-from utils import classproperty
+from root_utils import classproperty
 
 
 class AbstractExtendedStudy(AbstractStudy):
diff --git a/experiment/meteo_france_data/scm_models_data/abstract_study.py b/experiment/meteo_france_data/scm_models_data/abstract_study.py
index a10ac33e62c6ba0af54be9c6ec0209b3b198050d..b16ead03bcd1eb286b72bab364937aef29fd470d 100644
--- a/experiment/meteo_france_data/scm_models_data/abstract_study.py
+++ b/experiment/meteo_france_data/scm_models_data/abstract_study.py
@@ -30,7 +30,7 @@ from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoo
 from spatio_temporal_dataset.coordinates.spatial_coordinates.abstract_spatial_coordinates import \
     AbstractSpatialCoordinates
 from spatio_temporal_dataset.spatio_temporal_observations.annual_maxima_observations import AnnualMaxima
-from utils import get_full_path, cached_property, NB_CORES, classproperty
+from root_utils import get_full_path, cached_property, NB_CORES, classproperty
 
 f = io.StringIO()
 with redirect_stdout(f):
diff --git a/experiment/meteo_france_data/scm_models_data/crocus/taline_data.py b/experiment/meteo_france_data/scm_models_data/crocus/taline_data.py
index 183b0818b3cb72c8b122d5f92fcbce4cc5ce4a21..8de3a72f400c653d9fbf92b2d5847c1c4105fd9e 100644
--- a/experiment/meteo_france_data/scm_models_data/crocus/taline_data.py
+++ b/experiment/meteo_france_data/scm_models_data/crocus/taline_data.py
@@ -5,7 +5,7 @@ import pandas as pd
 
 from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusDepth
 from experiment.meteo_france_data.scm_models_data.scm_constants import ALTITUDES
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 massif_name = 'Queyras'
 study_class = CrocusDepth
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py
index 4cfb5d70450da977f3c4a5bd799f443b0c0f1ae1..30b3417d1ea334048bac50cb326bea55f8f832e3 100644
--- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/abstract_hypercube_visualizer.py
@@ -7,7 +7,7 @@ import pandas as pd
 
 from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \
     StudyVisualizer
-from utils import cached_property, VERSION_TIME, get_display_name_from_object_type
+from root_utils import cached_property, VERSION_TIME, get_display_name_from_object_type
 
 
 class AbstractHypercubeVisualizer(object):
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py
index dc3fd7973aef1d0f3ecde8aa610bcf5e28757130..d918abe93629d8ca3a5cf7700d34e2b9fafdc953 100644
--- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/altitude_hypercube_visualizer.py
@@ -12,7 +12,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat
     StudyVisualizer
 from experiment.trend_analysis.univariate_test.abstract_gev_trend_test import AbstractGevTrendTest
 from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 ALTITUDES_XLABEL = 'altitudes'
 
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py
index 822bd604e5a2a7fc82b907a0f0fcaff8aa4602ea..086e668bbcbe22053e9937e232abc6c72c37f854 100644
--- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_fast_hypercube_several_altitudes.py
@@ -1,12 +1,7 @@
 import time
 
-from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \
-    AltitudeHypercubeVisualizer
 from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer_extended import \
-    AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrendExtended, \
-    AltitudeHypercubeVisualizerWithoutTrendExtended, QuantityHypercubeWithoutTrend
-from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.quantity_altitude_visualizer import \
-    QuantityAltitudeHypercubeVisualizer
+    AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrend
 from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \
     load_altitude_visualizer, load_quantity_visualizer
 from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py
index 4ede274dabcd304b7874349a98cb4d73219f15f9..21311fc226b259ed6063df1202555e875a49bbd5 100644
--- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_full_hypercube.py
@@ -1,12 +1,7 @@
 import time
 
-from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer import \
-    AltitudeHypercubeVisualizer
 from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.altitude_hypercube_visualizer_extended import \
-    AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrendExtended, \
-    AltitudeHypercubeVisualizerWithoutTrendExtended, QuantityHypercubeWithoutTrend
-from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.quantity_altitude_visualizer import \
-    QuantityAltitudeHypercubeVisualizer
+    AltitudeHypercubeVisualizerBisExtended, QuantityHypercubeWithoutTrend
 from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visualization.utils_hypercube import \
     load_altitude_visualizer, load_quantity_visualizer
 from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py
index e11a6d31f22729975efaf1ca7349a796086903f3..011477f7efff572b3f60859ccaf1f8158e33d272 100644
--- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/main_files/main_poster_IMSC2019.py
@@ -9,7 +9,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.hypercube_visual
     load_altitude_visualizer
 from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.main_study_visualizer import \
     SCM_STUDIES, altitude_massif_name_and_study_class_for_poster, SCM_STUDIES_NAMES, SCM_STUDY_NAME_TO_SCM_STUDY
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 POSTER_ALTITUDES = [900, 1800, 2700]
 
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py
index 608b484227778fa1fe7ef788409107bf6d6f296e..5b7fbcb94e2986f316f81c40743ee36570f67099 100644
--- a/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/hypercube_visualization/utils_hypercube.py
@@ -9,7 +9,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat
     study_iterator_global
 from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \
     StudyVisualizer
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 
 def load_quantity_visualizer(quantity_hypercube_class, altitudes, last_starting_year, nb_data_reduced_for_speed,
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py
index 92da9208e0af6d725a96d763f03e3d49158ba86a..f553d1ac624b5194fe5b6424b1acf76e3cb47510 100644
--- a/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/studies_visualization/studies_visualizer.py
@@ -15,7 +15,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat
     StudyVisualizer
 from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest
 from experiment.meteo_france_data.scm_models_data.visualization.utils import plot_df
-from utils import cached_property, get_display_name_from_object_type, VERSION_TIME
+from root_utils import cached_property, get_display_name_from_object_type, VERSION_TIME
 
 
 class StudiesVisualizer(object):
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py
index 6dcb3303a4be9b2967dba446fa57c02c186f5ff4..aeb8f63e479ade6850960b6850c7e7a571696c14 100644
--- a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/main_study_visualizer.py
@@ -16,7 +16,7 @@ from collections import OrderedDict
 from experiment.trend_analysis.univariate_test.gev_trend_test_one_parameter import GevLocationTrendTest
 from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.uniform_normalization import \
     BetweenZeroAndOneNormalization, BetweenMinusOneAndOneNormalization
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 SCM_STUDIES = [SafranSnowfall, CrocusSweTotal, CrocusDepth, CrocusSwe3Days]
 SCM_STUDIES_NAMES = [get_display_name_from_object_type(k) for k in SCM_STUDIES]
diff --git a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py b/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py
index 7263aa3da2f39006d2e1e0873847107c936d712d..4b361e437ea578391d96d43081d745564cf06d6a 100644
--- a/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py
+++ b/experiment/meteo_france_data/scm_models_data/visualization/study_visualization/study_visualizer.py
@@ -10,6 +10,9 @@ import numpy as np
 import pandas as pd
 import seaborn as sns
 
+from experiment.eurocode_data.eurocode_return_level_uncertainties import ExtractEurocodeReturnLevelFromExtremes, \
+    EurocodeLevelUncertaintyFromExtremes
+from experiment.eurocode_data.massif_name_to_departement import dep_class_to_massif_names
 from experiment.meteo_france_data.scm_models_data.abstract_extended_study import AbstractExtendedStudy
 from experiment.trend_analysis.abstract_score import MeanScore, AbstractTrendScore
 from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy
@@ -17,13 +20,14 @@ from experiment.trend_analysis.univariate_test.abstract_univariate_test import A
 from experiment.trend_analysis.non_stationary_trends import \
     ConditionalIndedendenceLocationTrendTest, MaxStableLocationTrendTest, IndependenceLocationTrendTest
 from experiment.meteo_france_data.scm_models_data.visualization.utils import create_adjusted_axes
-from experiment.trend_analysis.univariate_test.utils import compute_gev_change_point_test_results
+from experiment.trend_analysis.univariate_test.univariate_test_results import compute_gev_change_point_test_results
 from experiment.utils import average_smoothing_with_sliding_window
 from extreme_fit.distribution.abstract_params import AbstractParams
 from extreme_fit.estimator.full_estimator.abstract_full_estimator import \
     FullEstimatorInASingleStepWithSmoothMargin
 from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator
-from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import LinearNonStationaryLocationMarginModel, \
+from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import \
+    LinearNonStationaryLocationMarginModel, \
     LinearStationaryMarginModel
 from extreme_fit.model.margin_model.margin_function.abstract_margin_function import \
     AbstractMarginFunction
@@ -42,7 +46,7 @@ from spatio_temporal_dataset.coordinates.temporal_coordinates.generated_temporal
 from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
 from spatio_temporal_dataset.spatio_temporal_observations.annual_maxima_observations import AnnualMaxima
 from test.test_utils import load_test_max_stable_models
-from utils import get_display_name_from_object_type, VERSION_TIME, float_to_str_with_only_some_significant_digits, \
+from root_utils import get_display_name_from_object_type, VERSION_TIME, float_to_str_with_only_some_significant_digits, \
     cached_property
 
 BLOCK_MAXIMA_DISPLAY_NAME = 'block maxima '
@@ -350,6 +354,36 @@ class StudyVisualizer(VisualizationParameters):
         start_year, stop_year = self.study.start_year_and_stop_year
         return list(range(start_year, stop_year))
 
+    def massif_name_to_eurocode_level_uncertainty(self, model_class) -> Dict[str, EurocodeLevelUncertaintyFromExtremes]:
+        # todo: add multiprocessing
+        massif_name_to_eurocode_return_level_uncertainty = {}
+        for massif_id, massif_name in enumerate(self.study.study_massif_names):
+            print(massif_name)
+            years, smooth_maxima = self.smooth_maxima_x_y(massif_id)
+            res = EurocodeLevelUncertaintyFromExtremes.from_maxima_years_model_class(smooth_maxima, years, model_class)
+            massif_name_to_eurocode_return_level_uncertainty[massif_name] = res
+        return massif_name_to_eurocode_return_level_uncertainty
+
+    def dep_class_to_eurocode_level_uncertainty(self, model_class):
+        """ Take the max with respect to the posterior mean for each departement """
+        massif_name_to_eurocode_level_uncertainty = self.massif_name_to_eurocode_level_uncertainty(model_class)
+        print(massif_name_to_eurocode_level_uncertainty)
+        dep_class_to_eurocode_level_uncertainty = {}
+        for dep_class, massif_names in dep_class_to_massif_names.items():
+            # Keep only the (massif name, uncertainty) couples belonging to this departement
+            couples = [(k, v) for k, v in massif_name_to_eurocode_level_uncertainty.items() if k in massif_names]
+            assert len(couples) > 0
+            # Find the massif name with the maximum posterior mean
+            for c in couples:
+                print(c)
+                print(c[1].posterior_mean)
+            massif_name, eurocode_level_uncertainty = max(couples, key=lambda c: c[1].posterior_mean)
+            dep_class_to_eurocode_level_uncertainty[dep_class] = eurocode_level_uncertainty
+        return dep_class_to_eurocode_level_uncertainty
+
+
+
+
     @cached_property
     def massif_name_to_detailed_scores(self):
         """
@@ -430,7 +464,8 @@ class StudyVisualizer(VisualizationParameters):
                 massif_names = [AbstractExtendedStudy.region_name_to_massif_names[r][0]
                                 for r in AbstractExtendedStudy.real_region_names]
                 massif_names_for_sampling = list(set(self.study.study_massif_names) - set(massif_names))
-                nb_massif_for_sampling = self.nb_massif_for_change_point_test - len(AbstractExtendedStudy.real_region_names)
+                nb_massif_for_sampling = self.nb_massif_for_change_point_test - len(
+                    AbstractExtendedStudy.real_region_names)
                 massif_names += sample(massif_names_for_sampling, k=nb_massif_for_sampling)
             else:
                 massif_names = sample(self.study.study_massif_names, k=self.nb_massif_for_change_point_test)
diff --git a/experiment/meteo_france_data/stations_data/comparison_analysis.py b/experiment/meteo_france_data/stations_data/comparison_analysis.py
index c85b1cdc7cdb9de087cab91507d8744f65df9eb9..ee5160279ff0ab29cadfe7a968ae0ca9bb98dbdc 100644
--- a/experiment/meteo_france_data/stations_data/comparison_analysis.py
+++ b/experiment/meteo_france_data/stations_data/comparison_analysis.py
@@ -20,7 +20,7 @@ from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
 from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \
     AbstractSpatioTemporalObservations
 from test.test_utils import load_test_max_stable_models
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 REANALYSE_STR = 'reanalyse'
 ALTITUDE_COLUMN_NAME = 'ALTITUDE'
diff --git a/experiment/meteo_france_data/stations_data/main_spatial_comparison.py b/experiment/meteo_france_data/stations_data/main_spatial_comparison.py
index 80374664c00543d036e6a1a7815fb2b9faf64a22..b2bdf90a1b60fd875c0e79637e181f6ff2dd3e78 100644
--- a/experiment/meteo_france_data/stations_data/main_spatial_comparison.py
+++ b/experiment/meteo_france_data/stations_data/main_spatial_comparison.py
@@ -2,7 +2,7 @@ from experiment.meteo_france_data.stations_data.comparison_analysis import Compa
 from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import LinearAllParametersAllDimsMarginModel
 from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.uniform_normalization import \
     BetweenZeroAndOneNormalization
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 
 def choice_of_altitude_and_nb_border_data_to_remove_to_get_data_without_nan():
diff --git a/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py b/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py
index af0fd9b579eedf432b5b5826a7e3e16618f7ddeb..7be7297adac50bdefe23884753541c8b43e7faf5 100644
--- a/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py
+++ b/experiment/meteo_france_data/stations_data/visualization/comparisons_visualization/comparisons_visualization.py
@@ -15,7 +15,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat
 from experiment.meteo_france_data.stations_data.comparison_analysis import ComparisonAnalysis, MASSIF_COLUMN_NAME, \
     REANALYSE_STR, ALTITUDE_COLUMN_NAME, STATION_COLUMN_NAME
 from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest
-from experiment.trend_analysis.univariate_test.utils import compute_gev_change_point_test_results
+from experiment.trend_analysis.univariate_test.univariate_test_results import compute_gev_change_point_test_results
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 
 path = r'/home/erwan/Documents/projects/spatiotemporalextremes/experiment/meteo_france_data/stations_data/csv'
diff --git a/experiment/robustness_plot/single_plot.py b/experiment/robustness_plot/single_plot.py
index 70f68923dbcedc3c8869538e704309167113334b..e2fe71331960c1ee1fdb0e761887f49bf47eb645 100644
--- a/experiment/robustness_plot/single_plot.py
+++ b/experiment/robustness_plot/single_plot.py
@@ -7,7 +7,7 @@ from itertools import product
 
 from extreme_fit.estimator.abstract_estimator import AbstractEstimator
 from experiment.robustness_plot.display_item import DisplayItem
-from utils import get_full_path
+from root_utils import get_full_path
 
 plt.style.use('seaborn-white')
 
diff --git a/experiment/simulation/abstract_simulation.py b/experiment/simulation/abstract_simulation.py
index 9e7370c561f02032c08e4f9b85cccd18fa86063e..bc4e14e3b3c15b7ac6f2ffdf271e32b892fe0bda 100644
--- a/experiment/simulation/abstract_simulation.py
+++ b/experiment/simulation/abstract_simulation.py
@@ -23,7 +23,7 @@ from extreme_fit.distribution.gev.gev_params import GevParams
 from spatio_temporal_dataset.dataset.abstract_dataset import get_subset_dataset
 from spatio_temporal_dataset.dataset.simulation_dataset import SimulatedDataset
 from spatio_temporal_dataset.slicer.split import split_to_display_kwargs
-from utils import get_full_path, get_display_name_from_object_type
+from root_utils import get_full_path, get_display_name_from_object_type
 
 SIMULATION_RELATIVE_PATH = op.join('local', 'simulation')
 
diff --git a/experiment/trend_analysis/non_stationary_trends.py b/experiment/trend_analysis/non_stationary_trends.py
index 3b6a362e62c9b3b9db57812b21d87d3d47634246..ab4debc4f54722d021ee1e58805fcdc15ef7ccaa 100644
--- a/experiment/trend_analysis/non_stationary_trends.py
+++ b/experiment/trend_analysis/non_stationary_trends.py
@@ -17,7 +17,7 @@ from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_m
 from extreme_fit.model.margin_model.margin_function.linear_margin_function import LinearMarginFunction
 from extreme_fit.model.utils import OptimizationConstants
 from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 
 class AbstractNonStationaryTrendTest(object):
diff --git a/experiment/trend_analysis/univariate_test/abstract_gev_trend_test.py b/experiment/trend_analysis/univariate_test/abstract_gev_trend_test.py
index af9ae8994711e5bf5dd7d6b2d6bb1c584f4c8b13..cbea66c2a8f10d8edaccc9a42652d5bab2c3f399 100644
--- a/experiment/trend_analysis/univariate_test/abstract_gev_trend_test.py
+++ b/experiment/trend_analysis/univariate_test/abstract_gev_trend_test.py
@@ -3,49 +3,38 @@ import pandas as pd
 from cached_property import cached_property
 from scipy.stats import chi2
 
+from experiment.eurocode_data.utils import EUROCODE_QUANTILE
 from experiment.trend_analysis.univariate_test.abstract_univariate_test import AbstractUnivariateTest
+from experiment.trend_analysis.univariate_test.utils import load_temporal_coordinates_and_dataset, \
+    fitted_linear_margin_estimator
 from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator
-from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import AbstractTemporalLinearMarginModel
+from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \
+    AbstractTemporalLinearMarginModel
 from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_models import \
     StationaryStationModel
 from extreme_fit.model.utils import SafeRunException
 from extreme_fit.distribution.gev.gev_params import GevParams
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
-from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_coordinates import \
-    AbstractTemporalCoordinates
-from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.abstract_transformation import \
-    CenteredScaledNormalization
-from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
-from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \
-    AbstractSpatioTemporalObservations
 
 
 class AbstractGevTrendTest(AbstractUnivariateTest):
     RRunTimeError_TREND = 'R RunTimeError trend'
     # I should use the quantile from the Eurocode for the buildings
-    quantile_for_strength = 0.98
+    quantile_for_strength = EUROCODE_QUANTILE
     nb_years_for_quantile_evolution = 10
 
-    def __init__(self, years, maxima, starting_year, unconstrained_model_class, constrained_model_class=StationaryStationModel,
+    def __init__(self, years, maxima, starting_year, unconstrained_model_class,
+                 constrained_model_class=StationaryStationModel,
                  fit_method=AbstractTemporalLinearMarginModel.ISMEV_GEV_FIT_METHOD_STR):
         super().__init__(years, maxima, starting_year)
         self.fit_method = fit_method
         # Load observations, coordinates and datasets
-        df = pd.DataFrame({AbstractCoordinates.COORDINATE_T: years})
-        df_maxima_gev = pd.DataFrame(maxima, index=df.index)
-        observations = AbstractSpatioTemporalObservations(df_maxima_gev=df_maxima_gev)
-        self.coordinates = AbstractTemporalCoordinates.from_df(df, transformation_class=CenteredScaledNormalization)
-        self.dataset = AbstractDataset(observations=observations, coordinates=self.coordinates)
-
+        self.coordinates, self.dataset = load_temporal_coordinates_and_dataset(maxima, years)
         try:
             # Fit constrained model
-            constrained_model = constrained_model_class(self.coordinates, starting_point=self.starting_year, fit_method=self.fit_method)
-            self.constrained_estimator = LinearMarginEstimator(self.dataset, constrained_model)
-            self.constrained_estimator.fit()
+            self.constrained_estimator = fitted_linear_margin_estimator(constrained_model_class, self.coordinates, self.dataset, self.starting_year, self.fit_method)
             # Fit unconstrained model
-            unconstrained_model = unconstrained_model_class(self.coordinates, starting_point=self.starting_year, fit_method=self.fit_method)
-            self.unconstrained_estimator = LinearMarginEstimator(self.dataset, unconstrained_model)
-            self.unconstrained_estimator.fit()
+            self.unconstrained_estimator = fitted_linear_margin_estimator(unconstrained_model_class, self.coordinates, self.dataset, self.starting_year, self.fit_method)
             self.crashed = False
         except SafeRunException:
             self.crashed = True
@@ -127,7 +116,6 @@ class AbstractGevTrendTest(AbstractUnivariateTest):
         return self.constrained_estimator.margin_function_from_fit.get_gev_params(coordinate=np.array([1958]),
                                                                                   is_transformed=False)
 
-
     @property
     def test_trend_slope_strength(self):
         if self.crashed:
@@ -136,7 +124,8 @@ class AbstractGevTrendTest(AbstractUnivariateTest):
             # Compute the slope strength
             slope = self._slope_strength()
             # Delta T must in the same unit as were the parameter of slope mu1 and sigma1
-            slope *= self.nb_years_for_quantile_evolution * self.coordinates.transformed_distance_between_two_successive_years[0]
+            slope *= self.nb_years_for_quantile_evolution * \
+                     self.coordinates.transformed_distance_between_two_successive_years[0]
             return slope
 
     def _slope_strength(self):
@@ -163,6 +152,3 @@ class AbstractGevTrendTest(AbstractUnivariateTest):
             return 0.0
         else:
             return self.non_stationary_constant_gev_params.quantile(p=self.quantile_for_strength)
-
-
-
diff --git a/experiment/trend_analysis/univariate_test/univariate_test_results.py b/experiment/trend_analysis/univariate_test/univariate_test_results.py
new file mode 100644
index 0000000000000000000000000000000000000000..aeea3f63c4eb0a6edfd3fcb4a8d7dbe0e82855e0
--- /dev/null
+++ b/experiment/trend_analysis/univariate_test/univariate_test_results.py
@@ -0,0 +1,44 @@
+from multiprocessing.pool import Pool
+
+import numpy as np
+
+from experiment.trend_analysis.univariate_test.abstract_gev_trend_test import AbstractGevTrendTest
+from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \
+    AbstractTemporalLinearMarginModel
+from root_utils import NB_CORES
+
+
+def compute_gev_change_point_test_result(smooth_maxima, starting_year, trend_test_class, years, fit_method=AbstractTemporalLinearMarginModel.ISMEV_GEV_FIT_METHOD_STR):
+    trend_test = trend_test_class(years, smooth_maxima, starting_year)  # type: AbstractGevTrendTest
+    assert isinstance(trend_test, AbstractGevTrendTest)
+    return trend_test.test_trend_type, \
+           trend_test.test_trend_slope_strength, \
+           trend_test.unconstained_nllh, \
+           trend_test.test_trend_constant_quantile, \
+           trend_test.mean_difference_same_sign_as_slope_strenght, \
+           trend_test.variance_difference_same_sign_as_slope_strenght, \
+           trend_test.unconstrained_model_deviance, \
+           trend_test.constrained_model_deviance
+
+
+def compute_gev_change_point_test_results(multiprocessing, maxima, starting_years, trend_test_class,
+                                          years):
+    if multiprocessing:
+        list_args = [(maxima, starting_year, trend_test_class, years) for starting_year in
+                     starting_years]
+        with Pool(NB_CORES) as p:
+            trend_test_res = p.starmap(compute_gev_change_point_test_result, list_args)
+    else:
+        trend_test_res = [
+            compute_gev_change_point_test_result(maxima, starting_year, trend_test_class, years)
+            for starting_year in starting_years]
+    # Keep only the most likely starting year
+    # (i.e. the starting year that minimizes the negative log-likelihood)
+    # (set all the other data to np.nan so that they are not taken into account in the mean function)
+    best_idx = list(np.argmin(trend_test_res, axis=0))[2]
+    # print(best_idx, trend_test_res)
+    best_idxs = [best_idx]
+    # todo: by sorting on the deviance, I could keep the nb_top_likelihood_values best values
+    # best_idxs = list(np.argmax(trend_test_res, axis=0))[-nb_top_likelihood_values:]
+
+    return trend_test_res, best_idxs
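
The starting-year selection takes the column-wise argmin over all candidate years and keeps the row index of column 2, which holds the unconstrained negative log-likelihood. A small numeric sketch of that selection with a fabricated results array (one row per starting year):

import numpy as np

# Hypothetical results: each row corresponds to one candidate starting year,
# column 2 plays the role of the unconstrained negative log-likelihood.
trend_test_res = np.array([
    [0.0, 0.3, 152.1, 1.2],
    [0.0, 0.5, 149.7, 1.3],   # smallest nllh, hence the most likely starting year
    [0.0, 0.2, 151.0, 1.1],
])
best_idx = list(np.argmin(trend_test_res, axis=0))[2]
print(best_idx)  # 1
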
diff --git a/experiment/trend_analysis/univariate_test/utils.py b/experiment/trend_analysis/univariate_test/utils.py
index 8c208ec24e67af7567ac874061d30d0f9fa8704c..75e4f7ff9c43157a0fa3f6cfde768bebd5c38367 100644
--- a/experiment/trend_analysis/univariate_test/utils.py
+++ b/experiment/trend_analysis/univariate_test/utils.py
@@ -1,44 +1,27 @@
-from multiprocessing.pool import Pool
+import pandas as pd
 
-import numpy as np
+from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator
+from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
+from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_coordinates import \
+    AbstractTemporalCoordinates
+from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.abstract_transformation import \
+    CenteredScaledNormalization
+from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
+from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \
+    AbstractSpatioTemporalObservations
 
-from experiment.trend_analysis.univariate_test.abstract_gev_trend_test import AbstractGevTrendTest
-from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \
-    AbstractTemporalLinearMarginModel
-from utils import NB_CORES
 
+def load_temporal_coordinates_and_dataset(maxima, years):
+    df = pd.DataFrame({AbstractCoordinates.COORDINATE_T: years})
+    df_maxima_gev = pd.DataFrame(maxima, index=df.index)
+    observations = AbstractSpatioTemporalObservations(df_maxima_gev=df_maxima_gev)
+    coordinates = AbstractTemporalCoordinates.from_df(df, transformation_class=CenteredScaledNormalization)
+    dataset = AbstractDataset(observations=observations, coordinates=coordinates)
+    return coordinates, dataset
 
-def compute_gev_change_point_test_result(smooth_maxima, starting_year, trend_test_class, years, fit_method=AbstractTemporalLinearMarginModel.ISMEV_GEV_FIT_METHOD_STR):
-    trend_test = trend_test_class(years, smooth_maxima, starting_year)  # type: AbstractGevTrendTest
-    assert isinstance(trend_test, AbstractGevTrendTest)
-    return trend_test.test_trend_type, \
-           trend_test.test_trend_slope_strength, \
-           trend_test.unconstained_nllh, \
-           trend_test.test_trend_constant_quantile, \
-           trend_test.mean_difference_same_sign_as_slope_strenght, \
-           trend_test.variance_difference_same_sign_as_slope_strenght, \
-           trend_test.unconstrained_model_deviance, \
-           trend_test.constrained_model_deviance
 
-
-def compute_gev_change_point_test_results(multiprocessing, maxima, starting_years, trend_test_class,
-                                          years):
-    if multiprocessing:
-        list_args = [(maxima, starting_year, trend_test_class, years) for starting_year in
-                     starting_years]
-        with Pool(NB_CORES) as p:
-            trend_test_res = p.starmap(compute_gev_change_point_test_result, list_args)
-    else:
-        trend_test_res = [
-            compute_gev_change_point_test_result(maxima, starting_year, trend_test_class, years)
-            for starting_year in starting_years]
-    # Keep only the most likely starting year
-    # (i.e. the starting year that minimizes its negative log likelihood)
-    # (set all the other data to np.nan so that they will not be taken into account in mean function)
-    best_idx = list(np.argmin(trend_test_res, axis=0))[2]
-    # print(best_idx, trend_test_res)
-    best_idxs = [best_idx]
-    # todo: by doing a sorting on the deviance, I could get the nb_top_likelihood_values values
-    # best_idxs = list(np.argmax(trend_test_res, axis=0))[-nb_top_likelihood_values:]
-
-    return trend_test_res, best_idxs
+def fitted_linear_margin_estimator(model_class, coordinates, dataset, starting_year, fit_method):
+    model = model_class(coordinates, starting_point=starting_year, fit_method=fit_method)
+    estimator = LinearMarginEstimator(dataset, model)
+    estimator.fit()
+    return estimator
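
The two helpers factored out above are meant to be chained, as in AbstractGevTrendTest and EurocodeLevelUncertaintyFromExtremes.from_maxima_years_model_class: build the temporal coordinates and dataset from the annual maxima, then fit a linear margin model on them. A usage sketch mirroring those call sites; it assumes the repository's packages are importable and uses hypothetical maxima:

from experiment.trend_analysis.univariate_test.utils import load_temporal_coordinates_and_dataset, \
    fitted_linear_margin_estimator
from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \
    AbstractTemporalLinearMarginModel
from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_models import StationaryStationModel

years = list(range(1958, 2018))          # hypothetical temporal coordinates
maxima = [100.0 + i for i in range(60)]  # hypothetical annual maxima

coordinates, dataset = load_temporal_coordinates_and_dataset(maxima, years)
estimator = fitted_linear_margin_estimator(
    StationaryStationModel, coordinates, dataset, starting_year=1958,
    fit_method=AbstractTemporalLinearMarginModel.ISMEV_GEV_FIT_METHOD_STR)
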
diff --git a/extreme_fit/distribution/gev/main_fevd_fixed.R b/extreme_fit/distribution/gev/main_fevd_fixed.R
index 6ecf87bb3661fa5aa6bb4d871120553c4b9446e1..6c9dca5b8f69b070845383df4ceb2f1d2834e80f 100644
--- a/extreme_fit/distribution/gev/main_fevd_fixed.R
+++ b/extreme_fit/distribution/gev/main_fevd_fixed.R
@@ -50,6 +50,7 @@ m = res$results
 print(dim(m))
 print(m[1,])
 print(m[1,1])
+print(res$chain.info[1,])
 
 
 
diff --git a/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py b/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py
index da993d39bbbe70c13df924c2e37ba0ee3bf9110d..cc8ae95541b8a74df94b6598a08fffc5845c6a95 100644
--- a/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py
+++ b/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py
@@ -31,8 +31,8 @@ class LinearMarginEstimator(AbstractMarginEstimator):
         df_coordinates_temp = self.dataset.coordinates.df_temporal_coordinates_for_fit(split=self.train_split,
                                                                                        starting_point=self.margin_model.starting_point)
         return self.margin_model.fitmargin_from_maxima_gev(data=maxima_gev_specialized,
-                                                                            df_coordinates_spat=df_coordinates_spat,
-                                                                            df_coordinates_temp=df_coordinates_temp)
+                                                           df_coordinates_spat=df_coordinates_spat,
+                                                           df_coordinates_temp=df_coordinates_temp)
 
     @cached_property
     def margin_function_from_fit(self) -> LinearMarginFunction:
diff --git a/extreme_fit/estimator/utils.py b/extreme_fit/estimator/utils.py
index 4f27e94943f62d1b3ae7b47396d8b28de109582b..dec6231828cbc6e60f2aaff9ca4fee0db0ae2f4c 100644
--- a/extreme_fit/estimator/utils.py
+++ b/extreme_fit/estimator/utils.py
@@ -3,8 +3,10 @@ from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model impo
 from extreme_fit.model.margin_model.margin_function.linear_margin_function import LinearMarginFunction
 
 
-def load_margin_function(estimator: AbstractEstimator, margin_model: LinearMarginModel, margin_function_class=LinearMarginFunction):
+def load_margin_function(estimator: AbstractEstimator, margin_model: LinearMarginModel, margin_function_class=LinearMarginFunction, coef_dict=None):
+    if coef_dict is None:
+        coef_dict = estimator.result_from_model_fit.margin_coef_dict
     return margin_function_class.from_coef_dict(coordinates=estimator.dataset.coordinates,
                                                 gev_param_name_to_dims=margin_model.margin_function_start_fit.gev_param_name_to_dims,
-                                                coef_dict=estimator.result_from_model_fit.margin_coef_dict,
+                                                coef_dict=coef_dict,
                                                 starting_point=margin_model.starting_point)
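The new optional coef_dict argument lets callers rebuild a margin function from an arbitrary coefficient dictionary, for instance a single posterior sample, instead of the fitted coefficients. A minimal sketch, where `estimator`, `margin_model` and `posterior_coef_dict` are placeholders (the latter could come from ResultFromExtremes.get_coef_dict_from_posterior_sample, added later in this diff):

from extreme_fit.estimator.utils import load_margin_function

# Default behaviour: coefficients taken from the model fit result.
margin_function_fitted = load_margin_function(estimator, margin_model)
# Override: rebuild the same margin function from another coefficient dictionary.
margin_function_sample = load_margin_function(estimator, margin_model, coef_dict=posterior_coef_dict)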
diff --git a/extreme_fit/model/margin_model/linear_margin_model/abstract_temporal_linear_margin_model.py b/extreme_fit/model/margin_model/linear_margin_model/abstract_temporal_linear_margin_model.py
index f67a5b85834efd182be0ff62c469a447c4eebfe6..1b96be1dfcd07848f22a24dba38e316ea8c99f7c 100644
--- a/extreme_fit/model/margin_model/linear_margin_model/abstract_temporal_linear_margin_model.py
+++ b/extreme_fit/model/margin_model/linear_margin_model/abstract_temporal_linear_margin_model.py
@@ -5,7 +5,7 @@ from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model impo
 from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import AbstractResultFromModelFit
 from extreme_fit.model.result_from_model_fit.result_from_extremes import ResultFromExtremes
 from extreme_fit.model.result_from_model_fit.result_from_ismev import ResultFromIsmev
-from extreme_fit.model.utils import r, ro, get_null
+from extreme_fit.model.utils import r, ro, get_null, get_margin_formula_extremes
 from extreme_fit.model.utils import safe_run_r_estimator
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 
@@ -46,6 +46,7 @@ class AbstractTemporalLinearMarginModel(LinearMarginModel):
         #     9)), iter = 5000, verbose = TRUE, use.phi = FALSE)
 
         r_type_argument_kwargs = {'use.phi': False}
+        r_type_argument_kwargs.update(get_margin_formula_extremes(self.margin_function_start_fit.form_dict))
         # location.fun = ~1,
         # scale.fun = ~1, shape.fun = ~1
         # , priorParams = list(q=c(6), p=c(9))
diff --git a/extreme_fit/model/margin_model/margin_function/abstract_margin_function.py b/extreme_fit/model/margin_model/margin_function/abstract_margin_function.py
index 9dd671bc0be22c3c22b07586e1f68084b5df450c..fb10aa4786ed0abbde175bf83ac9b9fbbedfb066 100644
--- a/extreme_fit/model/margin_model/margin_function/abstract_margin_function.py
+++ b/extreme_fit/model/margin_model/margin_function/abstract_margin_function.py
@@ -9,7 +9,7 @@ from extreme_fit.distribution.gev.gev_params import GevParams
 from experiment.meteo_france_data.plot.create_shifted_cmap import imshow_shifted
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 from spatio_temporal_dataset.slicer.split import Split
-from utils import cached_property
+from root_utils import cached_property
 
 
 class AbstractMarginFunction(object):
diff --git a/extreme_fit/model/margin_model/parametric_margin_model.py b/extreme_fit/model/margin_model/parametric_margin_model.py
index 9a762b1440ab16f554046231b19fbc4023c7d483..35ae8fa5e94bc417e5e05db4eb5abec43882b338 100644
--- a/extreme_fit/model/margin_model/parametric_margin_model.py
+++ b/extreme_fit/model/margin_model/parametric_margin_model.py
@@ -8,7 +8,7 @@ from extreme_fit.model.margin_model.margin_function.parametric_margin_function i
 from extreme_fit.model.result_from_model_fit.result_from_spatial_extreme import ResultFromSpatialExtreme
 from extreme_fit.model.margin_model.abstract_margin_model import AbstractMarginModel
 from extreme_fit.model.utils import safe_run_r_estimator, r, get_coord, \
-    get_margin_formula
+    get_margin_formula_spatial_extreme
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 
 
@@ -29,7 +29,7 @@ class ParametricMarginModel(AbstractMarginModel, ABC):
         assert data.shape[1] == len(df_coordinates_spat)
 
         # Margin formula for fitspatgev
-        fit_params = get_margin_formula(self.margin_function_start_fit.form_dict)
+        fit_params = get_margin_formula_spatial_extreme(self.margin_function_start_fit.form_dict)
 
         # Covariables
         covariables = get_coord(df_coordinates=df_coordinates_spat)
diff --git a/extreme_fit/model/max_stable_model/abstract_max_stable_model.py b/extreme_fit/model/max_stable_model/abstract_max_stable_model.py
index b2daca59a6ef060719935c27e78091c2b41b9038..3d6a4f61412a6cd5f6c2e234cec1880a723f16f0 100644
--- a/extreme_fit/model/max_stable_model/abstract_max_stable_model.py
+++ b/extreme_fit/model/max_stable_model/abstract_max_stable_model.py
@@ -8,7 +8,7 @@ from rpy2.rinterface._rinterface import RRuntimeError
 from extreme_fit.model.abstract_model import AbstractModel
 from extreme_fit.model.result_from_model_fit.result_from_spatial_extreme import ResultFromSpatialExtreme
 from extreme_fit.model.utils import r, safe_run_r_estimator, get_coord, \
-    get_margin_formula, SafeRunException
+    get_margin_formula_spatial_extreme, SafeRunException
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 
 
@@ -54,7 +54,7 @@ class AbstractMaxStableModel(AbstractModel):
         start_dict = self.remove_unused_parameters(start_dict, fitmaxstab_with_one_dimensional_data)
         if fit_marge:
             start_dict.update(margin_start_dict)
-            margin_formulas = get_margin_formula(fit_marge_form_dict)
+            margin_formulas = get_margin_formula_spatial_extreme(fit_marge_form_dict)
             fit_params.update(margin_formulas)
         if fitmaxstab_with_one_dimensional_data:
             fit_params['iso'] = True
diff --git a/extreme_fit/model/result_from_model_fit/result_from_extremes.py b/extreme_fit/model/result_from_model_fit/result_from_extremes.py
index 7bb02b5781e0a68b40be1be188d512ca130cd5ef..162fe62861cd2c31f2c1f36f8c372b3514601089 100644
--- a/extreme_fit/model/result_from_model_fit/result_from_extremes.py
+++ b/extreme_fit/model/result_from_model_fit/result_from_extremes.py
@@ -1,17 +1,54 @@
+import numpy as np
+import pandas as pd
 from rpy2 import robjects
 
+from extreme_fit.distribution.gev.gev_params import GevParams
 from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import \
     AbstractResultFromModelFit
+from extreme_fit.model.result_from_model_fit.utils import get_margin_coef_dict
 
 
 class ResultFromExtremes(AbstractResultFromModelFit):
 
-    def __init__(self, result_from_fit: robjects.ListVector, gev_param_name_to_dim=None) -> None:
+    def __init__(self, result_from_fit: robjects.ListVector, gev_param_name_to_dim=None,
+                 burn_in_percentage=0.1) -> None:
         super().__init__(result_from_fit)
+        self.burn_in_percentage = burn_in_percentage
         self.gev_param_name_to_dim = gev_param_name_to_dim
-        print(list(self.name_to_value.keys()))
 
-    # @property
+    @property
+    def results(self):
+        return np.array(self.name_to_value['results'])
+
+    @property
+    def chain_info(self):
+        return np.array(self.name_to_value['chain.info'])
+
+    @property
+    def df_posterior_samples(self) -> pd.DataFrame:
+        d = dict(zip(GevParams.PARAM_NAMES, self.results.transpose()))
+        d['loglik'] = self.chain_info[:, -2]
+        d['prior'] = self.chain_info[:, -1]
+        df_all_samples = pd.DataFrame(d)
+        # Discard the burn-in period
+        nb_burn_in_samples = int(self.burn_in_percentage * len(df_all_samples))
+        df_posterior_samples = df_all_samples.iloc[nb_burn_in_samples:, :]
+        return df_posterior_samples
+
+    def get_coef_dict_from_posterior_sample(self, s: pd.Series):
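+        # `s` holds one posterior sample of the GEV coefficients (at least loc, scale, shape),
+        # indexed positionally so that it lines up with get_margin_coef_dict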
+        assert len(s) >= 3
+        values = {i: v for i, v in enumerate(s)}
+        return get_margin_coef_dict(self.gev_param_name_to_dim, values)
+
+    @property
+    def margin_coef_dict(self):
+        """ It is the coef for the margin function corresponding to the mean posterior parameters """
+        mean_posterior_parameters = self.df_posterior_samples.iloc[:, :-2].mean(axis=0)
+        return self.get_coef_dict_from_posterior_sample(mean_posterior_parameters)
+
+    # @property
     # def
 
     # @property
@@ -49,4 +86,3 @@ class ResultFromExtremes(AbstractResultFromModelFit):
     # @property
     # def convergence(self) -> str:
     #     return convertFloatVector_to_float(self.name_to_value['conv']) == 0
-
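A hedged sketch of how the posterior post-processing above is meant to be used; `estimator` is assumed to come from a Bayesian fevd fit such as the one exercised in test_gev_temporal_bayesian.py at the end of this diff.

# `estimator.result_from_model_fit` is assumed to be a ResultFromExtremes.
res = estimator.result_from_model_fit  # type: ResultFromExtremes
df = res.df_posterior_samples  # burn-in already discarded (10% by default)
print(df[['loglik', 'prior']].describe())
# Margin coefficients at the posterior mean of the GEV parameters:
coef_dict_mean = res.margin_coef_dict
# Margin coefficients for a single posterior sample (e.g. to propagate uncertainty):
coef_dict_sample = res.get_coef_dict_from_posterior_sample(df.iloc[0, :-2])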
diff --git a/extreme_fit/model/result_from_model_fit/result_from_ismev.py b/extreme_fit/model/result_from_model_fit/result_from_ismev.py
index e3691fc26ac6b0965be02ab4d64b7e63aca06c92..d83dacecbc9b1610fd35e04092feecc3ceade29e 100644
--- a/extreme_fit/model/result_from_model_fit/result_from_ismev.py
+++ b/extreme_fit/model/result_from_model_fit/result_from_ismev.py
@@ -1,14 +1,13 @@
-from typing import Dict
+from rpy2 import robjects
 
-import numpy as np
+from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import \
+    AbstractResultFromModelFit
+from extreme_fit.model.result_from_model_fit.utils import convertFloatVector_to_float, get_margin_coef_dict
 from rpy2 import robjects
 
-from extreme_fit.model.margin_model.param_function.linear_coef import LinearCoef
 from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import \
     AbstractResultFromModelFit
 from extreme_fit.model.result_from_model_fit.utils import convertFloatVector_to_float
-from extreme_fit.distribution.gev.gev_params import GevParams
-from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 
 
 class ResultFromIsmev(AbstractResultFromModelFit):
@@ -19,23 +18,7 @@ class ResultFromIsmev(AbstractResultFromModelFit):
 
     @property
     def margin_coef_dict(self):
-        assert self.gev_param_name_to_dim is not None
-        # Build the Coeff dict from gev_param_name_to_dim
-        coef_dict = {}
-        i = 0
-        mle_values = self.name_to_value['mle']
-        for gev_param_name in GevParams.PARAM_NAMES:
-            # Add intercept
-            intercept_coef_name = LinearCoef.coef_template_str(gev_param_name, LinearCoef.INTERCEPT_NAME).format(1)
-            coef_dict[intercept_coef_name] = mle_values[i]
-            i += 1
-            # Add a potential linear temporal trend
-            if gev_param_name in self.gev_param_name_to_dim:
-                temporal_coef_name = LinearCoef.coef_template_str(gev_param_name,
-                                                                  AbstractCoordinates.COORDINATE_T).format(1)
-                coef_dict[temporal_coef_name] = mle_values[i]
-                i += 1
-        return coef_dict
+        return get_margin_coef_dict(self.gev_param_name_to_dim, self.name_to_value['mle'])
 
     @property
     def all_parameters(self):
diff --git a/extreme_fit/model/result_from_model_fit/utils.py b/extreme_fit/model/result_from_model_fit/utils.py
index ebd20e933d72386e9f91564710cffbe02f379a78..6d559b4f79abc69925565f3f5cb54a2a02dd4941 100644
--- a/extreme_fit/model/result_from_model_fit/utils.py
+++ b/extreme_fit/model/result_from_model_fit/utils.py
@@ -1,5 +1,28 @@
 import numpy as np
 
+from extreme_fit.distribution.gev.gev_params import GevParams
+from extreme_fit.model.margin_model.param_function.linear_coef import LinearCoef
+from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
+
 
 def convertFloatVector_to_float(f):
     return np.array(f)[0]
+
+
+def get_margin_coef_dict(gev_param_name_to_dim, mle_values):
+    assert gev_param_name_to_dim is not None
+    # Build the Coeff dict from gev_param_name_to_dim
+    coef_dict = {}
+    i = 0
+    for gev_param_name in GevParams.PARAM_NAMES:
+        # Add intercept
+        intercept_coef_name = LinearCoef.coef_template_str(gev_param_name, LinearCoef.INTERCEPT_NAME).format(1)
+        coef_dict[intercept_coef_name] = mle_values[i]
+        i += 1
+        # Add a potential linear temporal trend
+        if gev_param_name in gev_param_name_to_dim:
+            temporal_coef_name = LinearCoef.coef_template_str(gev_param_name,
+                                                              AbstractCoordinates.COORDINATE_T).format(1)
+            coef_dict[temporal_coef_name] = mle_values[i]
+            i += 1
+    return coef_dict
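For reference, a small sketch of the dictionary this factored-out helper produces; the exact key strings come from LinearCoef.coef_template_str, so the names shown in the comments are indicative only.

from extreme_fit.model.result_from_model_fit.utils import get_margin_coef_dict

# Stationary fit: no temporal trend, mle_values ordered as GevParams.PARAM_NAMES.
coef_dict = get_margin_coef_dict(gev_param_name_to_dim={}, mle_values=[0.08, 1.18, 0.88])
# -> one intercept coefficient per GEV parameter, e.g.
#    {'locCoeff1': 0.08, 'scaleCoeff1': 1.18, 'shapeCoeff1': 0.88}   (key names indicative)
# With a linear trend on a parameter, gev_param_name_to_dim maps that parameter name to its
# temporal dimension and mle_values carries the slope right after the corresponding intercept.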
diff --git a/extreme_fit/model/utils.py b/extreme_fit/model/utils.py
index f7ffaf044abb6f61203661362cf300037ea1c9ca..aec823bf7131b2462c4c7bac8ca4e41e87b4e5e3 100644
--- a/extreme_fit/model/utils.py
+++ b/extreme_fit/model/utils.py
@@ -1,26 +1,23 @@
 import io
 import os.path as op
+import random
 import warnings
 from contextlib import redirect_stdout
+from typing import Dict
 
 import numpy as np
-import random
-import sys
-from types import TracebackType
-from typing import Dict, Optional
-
 import pandas as pd
 import rpy2.robjects as ro
 from rpy2 import robjects
 from rpy2.rinterface import RRuntimeWarning
 from rpy2.rinterface._rinterface import RRuntimeError
-
 from rpy2.robjects import numpy2ri
 from rpy2.robjects import pandas2ri
 
-from utils import get_root_path
 
 # Load R variables
+from root_utils import get_root_path
+
 r = ro.R()
 numpy2ri.activate()
 pandas2ri.activate()
@@ -128,10 +125,27 @@ def get_null():
     return as_null(1.0)
 
 
-def get_margin_formula(fit_marge_form_dict) -> Dict:
+# TODO: consider moving this into the result class
+def get_margin_formula_spatial_extreme(fit_marge_form_dict) -> Dict:
     margin_formula = {k: robjects.Formula(v) if v != 'NULL' else get_null() for k, v in fit_marge_form_dict.items()}
     return margin_formula
 
+
+def old_coef_name_to_new_coef_name():
+    return {
+        'temp.form.loc': 'location.fun',
+        'temp.form.scale': 'scale.fun',
+        'temp.form.shape': 'shape.fun',
+    }
+
+
+def get_margin_formula_extremes(fit_marge_form_dict) -> Dict:
+    d = old_coef_name_to_new_coef_name()
+    form_dict = {d[k]: v if v != 'NULL' else ' ~1' for k, v in fit_marge_form_dict.items()}
+    return {k: robjects.Formula(v) for k, v in form_dict.items()}
+
+
 # def conversion_to_FloatVector(data):
 #     """Convert DataFrame or numpy array into FloatVector for r"""
 #     if isinstance(data, pd.DataFrame):
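To make the renaming above concrete, a sketch of how the two helpers translate the same form dict for the two R backends; the form-dict keys are those remapped by old_coef_name_to_new_coef_name, while the formula strings are illustrative only.

from extreme_fit.model.utils import get_margin_formula_spatial_extreme, get_margin_formula_extremes

form_dict = {'temp.form.loc': 'loc ~ coord_t', 'temp.form.scale': 'NULL', 'temp.form.shape': 'NULL'}
get_margin_formula_spatial_extreme(form_dict)
# -> keys unchanged; 'NULL' entries become the R NULL object, others become R formulas
get_margin_formula_extremes(form_dict)
# -> keys remapped to extRemes arguments ('location.fun', 'scale.fun', 'shape.fun');
#    'NULL' entries become the constant formula ' ~1'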
diff --git a/utils.py b/root_utils.py
similarity index 100%
rename from utils.py
rename to root_utils.py
diff --git a/spatio_temporal_dataset/coordinates/abstract_coordinates.py b/spatio_temporal_dataset/coordinates/abstract_coordinates.py
index 290bbe31941aedfbd1e79da6fffe6ed81db89ab3..16cc04ad889777c65f04e166f8554f5bc621c66c 100644
--- a/spatio_temporal_dataset/coordinates/abstract_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/abstract_coordinates.py
@@ -251,7 +251,9 @@ class AbstractCoordinates(object):
             # Compute the indices to modify
             ind_to_modify = df_temporal_coordinates.iloc[:, 0] <= starting_point  # type: pd.Series
             # Assert that some coordinates are selected but not all
-            assert 0 < sum(ind_to_modify) < len(ind_to_modify), sum(ind_to_modify)
+            msg = 'nb of coordinates to modify={}, first year={}, last year={}'.format(
+                sum(ind_to_modify), df_temporal_coordinates.iloc[0, 0], df_temporal_coordinates.iloc[-1, 0])
+            assert 0 < sum(ind_to_modify) < len(ind_to_modify), msg
             # Modify the temporal coordinates to enforce the stationarity
             df_temporal_coordinates.loc[ind_to_modify] = starting_point
             # Load the temporal transformation object
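The enriched assertion message above reports how many temporal coordinates get clamped together with the first and last year of the series; a standalone sketch of the clamping it guards (pandas only, column name illustrative):

import pandas as pd

starting_point = 1980
df_temporal_coordinates = pd.DataFrame({'coordinate_t': range(1960, 2011)})
ind_to_modify = df_temporal_coordinates.iloc[:, 0] <= starting_point
msg = 'nb of coordinates to modify={}, first year={}, last year={}'.format(
    sum(ind_to_modify), df_temporal_coordinates.iloc[0, 0], df_temporal_coordinates.iloc[-1, 0])
# Some, but not all, coordinates must fall before the starting point.
assert 0 < sum(ind_to_modify) < len(ind_to_modify), msg
# Years at or before the starting point are clamped to it, enforcing stationarity there.
df_temporal_coordinates.loc[ind_to_modify] = starting_point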
diff --git a/spatio_temporal_dataset/coordinates/spatial_coordinates/alps_station_3D_coordinates.py b/spatio_temporal_dataset/coordinates/spatial_coordinates/alps_station_3D_coordinates.py
index 0f1df96dcf5f88878616f2b30a1c2f9258a953d3..8007bfd98a283851e3f062a3dad7f464175ff8d0 100644
--- a/spatio_temporal_dataset/coordinates/spatial_coordinates/alps_station_3D_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/spatial_coordinates/alps_station_3D_coordinates.py
@@ -7,7 +7,7 @@ from spatio_temporal_dataset.coordinates.spatial_coordinates.abstract_spatial_co
 from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.tranformation_3D import \
     AnisotropyTransformation
 from spatio_temporal_dataset.coordinates.transformed_coordinates.transformed_coordinates import TransformedCoordinates
-from utils import get_full_path
+from root_utils import get_full_path
 
 
 class AlpsStation3DCoordinates(AbstractSpatialCoordinates):
diff --git a/spatio_temporal_dataset/spatio_temporal_observations/alps_precipitation_observations.py b/spatio_temporal_dataset/spatio_temporal_observations/alps_precipitation_observations.py
index f69ed0b4ab998ac28038eaba9646e3f9e7a4011b..83ead9544aba119a5d350bf4af807c8bfc350aa4 100644
--- a/spatio_temporal_dataset/spatio_temporal_observations/alps_precipitation_observations.py
+++ b/spatio_temporal_dataset/spatio_temporal_observations/alps_precipitation_observations.py
@@ -2,7 +2,7 @@ import os.path as op
 
 from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \
     AbstractSpatioTemporalObservations
-from utils import get_full_path
+from root_utils import get_full_path
 
 
 class AlpsPrecipitationObservations(AbstractSpatioTemporalObservations):
diff --git a/test/test_experiment/test_SCM_study.py b/test/test_experiment/test_SCM_study.py
index 8c0e1e675d4ddc71c575405f027948bcd9af022d..7a0aa485325cb3420c07c24b6ccdc57bdbadc31f 100644
--- a/test/test_experiment/test_SCM_study.py
+++ b/test/test_experiment/test_SCM_study.py
@@ -13,7 +13,7 @@ from experiment.meteo_france_data.scm_models_data.visualization.study_visualizat
 from experiment.meteo_france_data.scm_models_data.visualization.study_visualization.study_visualizer import \
     StudyVisualizer
 from experiment.trend_analysis.univariate_test.gev_trend_test_one_parameter import GevLocationTrendTest
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 
 class TestSCMAllStudy(unittest.TestCase):
diff --git a/test/test_experiment/test_coordinate_sensitivity.py b/test/test_experiment/test_coordinate_sensitivity.py
index 081d5ce00fc46ec9a49152c2ce79237f5b9b1452..e709029abfec4075542c843475e1eb6a15a32b42 100644
--- a/test/test_experiment/test_coordinate_sensitivity.py
+++ b/test/test_experiment/test_coordinate_sensitivity.py
@@ -9,7 +9,7 @@ from experiment.trend_analysis.non_stationary_trends import \
     ConditionalIndedendenceLocationTrendTest
 from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.uniform_normalization import \
     BetweenZeroAndOneNormalization, BetweenZeroAndOneNormalizationMinEpsilon, BetweenZeroAndOneNormalizationMaxEpsilon
-from utils import get_display_name_from_object_type
+from root_utils import get_display_name_from_object_type
 
 
 class TestCoordinateSensitivity(unittest.TestCase):
diff --git a/test/test_experiment/test_region_eurocode.py b/test/test_experiment/test_region_eurocode.py
index 97e7d3df8bce498c23dc33a8ca8b7ddc0cd9b61e..4aa64fb10b1e94ffa731f5ffe1d30e7b300bfe34 100644
--- a/test/test_experiment/test_region_eurocode.py
+++ b/test/test_experiment/test_region_eurocode.py
@@ -1,21 +1,24 @@
 import unittest
-from collections import OrderedDict
 
-from experiment.eurocode_data.eurocode_visualizer import display_region_limit
-from experiment.eurocode_data.eurocode_region import C1, E, C2
+from experiment.eurocode_data.eurocode_return_level_uncertainties import EurocodeLevelUncertaintyFromExtremes
+from experiment.eurocode_data.eurocode_visualizer import plot_model_name_to_dep_to_ordered_return_level_uncertainties
+from experiment.eurocode_data.massif_name_to_departement import DEPARTEMENT_TYPES
+from experiment.eurocode_data.utils import EUROCODE_ALTITUDES
 
 
 class TestCoordinateSensitivity(unittest.TestCase):
     DISPLAY = False
 
-    def test_region_eurocode(self):
-        altitudes = [900, 1200, 1500, 1800]
-        ordered_massif_name_to_quantiles = OrderedDict()
-        ordered_massif_name_to_quantiles['Vanoise'] = [1.2, 1.5, 1.7, 2.1]
-        ordered_massif_name_to_quantiles['Vercors'] = [0.7, 0.8, 1.1, 1.5]
-        display_region_limit(C1, altitudes, ordered_massif_name_to_quantiles, display=self.DISPLAY)
-        display_region_limit(C2, altitudes, ordered_massif_name_to_quantiles, display=self.DISPLAY)
-        display_region_limit(E, altitudes, ordered_massif_name_to_quantiles, display=self.DISPLAY)
+    def test_departement_eurocode_plot(self):
+        # Create an example
+        example = EurocodeLevelUncertaintyFromExtremes(posterior_mean=1.0,
+                                                       poster_uncertainty_interval=(0.5, 1.25))
+        dep_to_model_name_to_return_level_uncertainty = {
+            dep: {"example": [example for _ in EUROCODE_ALTITUDES]} for dep in DEPARTEMENT_TYPES
+        }
+        plot_model_name_to_dep_to_ordered_return_level_uncertainties(dep_to_model_name_to_return_level_uncertainty,
+                                                                     show=self.DISPLAY)
 
 
 if __name__ == '__main__':
diff --git a/test/test_extreme_estimator/test_margin_fits/test_gev/test_gev_temporal_bayesian.py b/test/test_extreme_estimator/test_margin_fits/test_gev/test_gev_temporal_bayesian.py
index 5f5ba22c95cc359aa037976d793eca6c4da5e802..2b9aad9aef6871dc19c5de00fc6746293c3427ca 100644
--- a/test/test_extreme_estimator/test_margin_fits/test_gev/test_gev_temporal_bayesian.py
+++ b/test/test_extreme_estimator/test_margin_fits/test_gev/test_gev_temporal_bayesian.py
@@ -3,11 +3,14 @@ import unittest
 import numpy as np
 import pandas as pd
 
+from experiment.trend_analysis.univariate_test.abstract_gev_trend_test import fitted_linear_margin_estimator
+from extreme_fit.distribution.gev.gev_params import GevParams
 from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator
 from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \
     AbstractTemporalLinearMarginModel
 from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_models import StationaryStationModel, \
     NonStationaryLocationStationModel
+from extreme_fit.model.result_from_model_fit.result_from_extremes import ResultFromExtremes
 from extreme_fit.model.utils import r, set_seed_r
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_coordinates import \
@@ -18,74 +21,50 @@ from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_tempor
 from test.test_utils import load_non_stationary_temporal_margin_models
 
 
-# class TestGevTemporalBayesian(unittest.TestCase):
-#
-#     def setUp(self) -> None:
-#         set_seed_r()
-#         r("""
-#         N <- 50
-#         loc = 0; scale = 1; shape <- 1
-#         x_gev <- rgev(N, loc = loc, scale = scale, shape = shape)
-#         start_loc = 0; start_scale = 1; start_shape = 1
-#         """)
-#         # Compute the stationary temporal margin with isMev
-#         self.start_year = 0
-#         df = pd.DataFrame({AbstractCoordinates.COORDINATE_T: range(self.start_year, self.start_year + 50)})
-#         self.coordinates = AbstractTemporalCoordinates.from_df(df)
-#         df2 = pd.DataFrame(data=np.array(r['x_gev']), index=df.index)
-#         observations = AbstractSpatioTemporalObservations(df_maxima_gev=df2)
-#         self.dataset = AbstractDataset(observations=observations, coordinates=self.coordinates)
-#         self.fit_method = AbstractTemporalLinearMarginModel.EXTREMES_FEVD_BAYESIAN_FIT_METHOD_STR
-#
-#     def test_gev_temporal_margin_fit_stationary(self):
-#         # Create estimator
-#         margin_model = StationaryStationModel(self.coordinates, fit_method=self.fit_method)
-#         estimator = LinearMarginEstimator(self.dataset, margin_model)
-#         estimator.fit()
-#         ref = {'loc': 0.0219, 'scale': 1.0347, 'shape': 0.8295}
-#         for year in range(1, 3):
-#             mle_params_estimated = estimator.margin_function_fitted.get_gev_params(np.array([year])).to_dict()
-#             for key in ref.keys():
-#                 self.assertAlmostEqual(ref[key], mle_params_estimated[key], places=3)
+class TestGevTemporalBayesian(unittest.TestCase):
 
-    # def test_gev_temporal_margin_fit_nonstationary(self):
-    #     # Create estimator
-    #     margin_models = load_non_stationary_temporal_margin_models(self.coordinates)
-    #     for margin_model in margin_models:
-    #         # margin_model = NonStationaryLocationStationModel(self.coordinates)
-    #         estimator = LinearMarginEstimator(self.dataset, margin_model)
-    #         estimator.fit()
-    #         # Checks that parameters returned are indeed different
-    #         mle_params_estimated_year1 = estimator.margin_function_fitted.get_gev_params(np.array([1])).to_dict()
-    #         mle_params_estimated_year3 = estimator.margin_function_fitted.get_gev_params(np.array([3])).to_dict()
-    #         self.assertNotEqual(mle_params_estimated_year1, mle_params_estimated_year3)
-    #
-    # def test_gev_temporal_margin_fit_nonstationary_with_start_point(self):
+    def setUp(self) -> None:
+        set_seed_r()
+        r("""
+        N <- 50
+        loc = 0; scale = 1; shape <- 1
+        x_gev <- rgev(N, loc = loc, scale = scale, shape = shape)
+        start_loc = 0; start_scale = 1; start_shape = 1
+        """)
+        # Compute the stationary temporal margin with isMev
+        self.start_year = 0
+        df = pd.DataFrame({AbstractCoordinates.COORDINATE_T: range(self.start_year, self.start_year + 50)})
+        self.coordinates = AbstractTemporalCoordinates.from_df(df)
+        df2 = pd.DataFrame(data=np.array(r['x_gev']), index=df.index)
+        observations = AbstractSpatioTemporalObservations(df_maxima_gev=df2)
+        self.dataset = AbstractDataset(observations=observations, coordinates=self.coordinates)
+        self.fit_method = AbstractTemporalLinearMarginModel.EXTREMES_FEVD_BAYESIAN_FIT_METHOD_STR
+
+    def test_gev_temporal_margin_fit_stationary(self):
+        # Create estimator
+        estimator_fitted = fitted_linear_margin_estimator(StationaryStationModel, self.coordinates, self.dataset,
+                                                          starting_year=0, fit_method=self.fit_method)
+        ref = {'loc': 0.082261, 'scale': 1.183703, 'shape': 0.882750}
+        for year in range(1, 3):
+            mle_params_estimated = estimator_fitted.margin_function_from_fit.get_gev_params(np.array([year])).to_dict()
+            for key in ref.keys():
+                self.assertAlmostEqual(ref[key], mle_params_estimated[key], places=3)
+
+    # def test_gev_temporal_margin_fit_non_stationary(self):
     #     # Create estimator
-    #     estimator = self.fit_non_stationary_estimator(starting_point=3)
-    #     self.assertNotEqual(estimator.margin_function_fitted.mu1_temporal_trend, 0.0)
-    #     # Checks starting point parameter are well passed
-    #     self.assertEqual(3, estimator.margin_function_fitted.starting_point)
-    #     # Checks that parameters returned are indeed different
-    #     mle_params_estimated_year1 = estimator.margin_function_fitted.get_gev_params(np.array([1])).to_dict()
-    #     mle_params_estimated_year3 = estimator.margin_function_fitted.get_gev_params(np.array([3])).to_dict()
-    #     self.assertEqual(mle_params_estimated_year1, mle_params_estimated_year3)
-    #     mle_params_estimated_year5 = estimator.margin_function_fitted.get_gev_params(np.array([5])).to_dict()
-    #     self.assertNotEqual(mle_params_estimated_year5, mle_params_estimated_year3)
-    #
-    # def fit_non_stationary_estimator(self, starting_point):
-    #     margin_model = NonStationaryLocationStationModel(self.coordinates, starting_point=starting_point + self.start_year)
-    #     estimator = LinearMarginEstimator(self.dataset, margin_model)
-    #     estimator.fit()
-    #     return estimator
-    #
-    # def test_two_different_starting_points(self):
-    #     # Create two different estimators
-    #     estimator1 = self.fit_non_stationary_estimator(starting_point=3)
-    #     estimator2 = self.fit_non_stationary_estimator(starting_point=28)
-    #     mu1_estimator1 = estimator1.margin_function_fitted.mu1_temporal_trend
-    #     mu1_estimator2 = estimator2.margin_function_fitted.mu1_temporal_trend
-    #     self.assertNotEqual(mu1_estimator1, mu1_estimator2)
+    #     estimator_fitted = fitted_linear_margin_estimator(NonStationaryLocationStationModel, self.coordinates, self.dataset,
+    #                                                       starting_year=0,
+    #                                                       fit_method=AbstractTemporalLinearMarginModel.EXTREMES_FEVD_BAYESIAN_FIT_METHOD_STR)
+    #     result_from_model_fit = estimator_fitted.result_from_model_fit  # type: ResultFromExtremes
+    #     print(result_from_model_fit.df_posterior_samples)
 
 
 if __name__ == '__main__':