diff --git a/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py b/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py
index 8286e8d56d86ab76741ea3fa12ca0ba5e07db0dd..71761b228ab1cd241ff0c974f253204129116377 100644
--- a/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py
+++ b/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py
@@ -1,9 +1,10 @@
 from abc import ABC
 
+import numpy as np
 from cached_property import cached_property
 
 from extreme_fit.estimator.abstract_estimator import AbstractEstimator
-from extreme_fit.estimator.utils import load_margin_function, compute_nllh
+from extreme_fit.estimator.utils import load_margin_function
 from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import LinearMarginModel
 from extreme_fit.function.margin_function.linear_margin_function import LinearMarginFunction
 from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import AbstractResultFromModelFit
@@ -42,11 +43,19 @@ class LinearMarginEstimator(AbstractMarginEstimator):
     def maxima_gev_train(self):
         return self.dataset.maxima_gev_for_spatial_extremes_package(self.train_split)
 
-    @property
-    def nllh(self, split=Split.all):
-        assert split is Split.all
-        return compute_nllh(self, self.maxima_gev_train, self.coordinate_temp, self.margin_model)
-
     @cached_property
     def function_from_fit(self) -> LinearMarginFunction:
         return load_margin_function(self, self.margin_model)
+
+    def nllh(self, split=Split.all):
+        nllh = 0
+        maxima_values = self.dataset.maxima_gev(split=split)
+        coordinate_values = self.dataset.coordinates_values(split=split)
+        for maximum, coordinate in zip(maxima_values, coordinate_values):
+            assert len(maximum) == 1, \
+                'So far, only one observation per coordinate is supported; the code would be easy to extend'
+            maximum = maximum[0]
+            gev_params = self.function_from_fit.get_gev_params(coordinate, is_transformed=False)
+            p = gev_params.density(maximum)
+            nllh -= np.log(p)
+        return nllh
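
For reference, the relocated `nllh(split=...)` sums `-log` of the fitted GEV density at each coordinate's maximum. A minimal standalone sketch of the same computation, using scipy's `genextreme` (whose shape parameter is the negative of the usual GEV shape ξ) as a stand-in for `GevParams.density`; the parameter values below are illustrative only:

```python
import numpy as np
from scipy.stats import genextreme


def gev_nllh(maxima, locs, scales, shapes):
    """Negative log-likelihood of independent GEV observations, one parameter set per maximum."""
    nllh = 0.0
    for x, loc, scale, shape in zip(maxima, locs, scales, shapes):
        p = genextreme.pdf(x, c=-shape, loc=loc, scale=scale)  # scipy shape c = -xi
        nllh -= np.log(p)
    return nllh


# Stationary case: the same parameters for every annual maximum.
maxima = np.array([1.2, 0.8, 1.5, 2.1])
print(gev_nllh(maxima, locs=[1.0] * 4, scales=[0.5] * 4, shapes=[0.1] * 4))
```
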
diff --git a/extreme_fit/estimator/margin_estimator/utils.py b/extreme_fit/estimator/margin_estimator/utils.py
index 145174be68d32fde906adff400377ea6b3c4a29e..23789660a344ad4ec4f06edf35ee24b256aa74f4 100644
--- a/extreme_fit/estimator/margin_estimator/utils.py
+++ b/extreme_fit/estimator/margin_estimator/utils.py
@@ -4,18 +4,20 @@ import pandas as pd
 
 from extreme_fit.distribution.gev.gev_params import GevParams
 from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator
-from extreme_fit.model.margin_model.utils import MarginFitMethod
 from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_models import StationaryTemporalModel
+from extreme_fit.model.margin_model.utils import MarginFitMethod
 from spatio_temporal_dataset.coordinates.temporal_coordinates.generated_temporal_coordinates import \
     ConsecutiveTemporalCoordinates
 from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.abstract_transformation import \
     CenteredScaledNormalization
 from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
-from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \
-    AbstractSpatioTemporalObservations
 from spatio_temporal_dataset.spatio_temporal_observations.annual_maxima_observations import AnnualMaxima
 
 
+def fitted_linear_margin_estimator_short(model_class, dataset, fit_method, **model_kwargs) -> LinearMarginEstimator:
+    return fitted_linear_margin_estimator(model_class, dataset.coordinates, dataset, None, fit_method, **model_kwargs)
+
+
 def fitted_linear_margin_estimator(model_class, coordinates, dataset, starting_year, fit_method, **model_kwargs):
     model = model_class(coordinates, starting_point=starting_year, fit_method=fit_method, **model_kwargs)
     estimator = LinearMarginEstimator(dataset, model)
@@ -23,7 +25,8 @@ def fitted_linear_margin_estimator(model_class, coordinates, dataset, starting_y
     return estimator
 
 
-def fitted_stationary_gev(x_gev, fit_method=MarginFitMethod.is_mev_gev_fit, model_class=StationaryTemporalModel, starting_year=None,
+def fitted_stationary_gev(x_gev, fit_method=MarginFitMethod.is_mev_gev_fit, model_class=StationaryTemporalModel,
+                          starting_year=None,
                           transformation_class=CenteredScaledNormalization) -> GevParams:
     coordinates = ConsecutiveTemporalCoordinates.from_nb_temporal_steps(nb_temporal_steps=len(x_gev),
                                                                         transformation_class=CenteredScaledNormalization)
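
The new `fitted_linear_margin_estimator_short` shorthand simply forwards `dataset.coordinates` and `starting_year=None` to the existing helper. A hypothetical call site (the `dataset` object, an `AbstractDataset`, is assumed to be built elsewhere, and the fit method is just one of the supported options):

```python
from extreme_fit.estimator.margin_estimator.utils import fitted_linear_margin_estimator_short
from extreme_fit.model.margin_model.linear_margin_model.temporal_linear_margin_models import StationaryTemporalModel
from extreme_fit.model.margin_model.utils import MarginFitMethod

# dataset: an AbstractDataset instance built elsewhere (assumption)
estimator = fitted_linear_margin_estimator_short(model_class=StationaryTemporalModel,
                                                 dataset=dataset,
                                                 fit_method=MarginFitMethod.extremes_fevd_mle)
print(estimator.nllh())  # defaults to Split.all
```
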
diff --git a/extreme_fit/estimator/utils.py b/extreme_fit/estimator/utils.py
index e66669fc6bd5c761690716d8d685aaab66271fdb..8debd9401a2e317f219a18e408af9228d208093f 100644
--- a/extreme_fit/estimator/utils.py
+++ b/extreme_fit/estimator/utils.py
@@ -15,12 +15,4 @@ def load_margin_function(estimator: AbstractEstimator, margin_model: LinearMargi
                                                 starting_point=margin_model.starting_point)
 
 
-def compute_nllh(estimator: AbstractEstimator, maxima, coordinate_temp, margin_model: LinearMarginModel,
-                 margin_function_class=LinearMarginFunction, coef_dict=None):
-    margin_function = load_margin_function(estimator, margin_model, margin_function_class, coef_dict)
-    nllh = 0
-    for maximum, year in zip(maxima[0], coordinate_temp.values):
-        gev_params = margin_function.get_gev_params(year, is_transformed=False)
-        p = gev_params.density(maximum)
-        nllh -= np.log(p)
-    return nllh
+
diff --git a/extreme_fit/model/margin_model/parametric_margin_model.py b/extreme_fit/model/margin_model/parametric_margin_model.py
index 8e83405d81f00e750af37c5c141f8d3357e33377..b27eeb14f6e6439c4a7d4db85702a674865dfa08 100644
--- a/extreme_fit/model/margin_model/parametric_margin_model.py
+++ b/extreme_fit/model/margin_model/parametric_margin_model.py
@@ -6,6 +6,7 @@ import pandas as pd
 from extreme_fit.distribution.gev.gev_params import GevParams
 from extreme_fit.function.margin_function.parametric_margin_function import \
     ParametricMarginFunction
+from extreme_fit.model.margin_model.utils import MarginFitMethod
 from extreme_fit.model.result_from_model_fit.result_from_spatial_extreme import ResultFromSpatialExtreme
 from extreme_fit.model.margin_model.abstract_margin_model import AbstractMarginModel
 from extreme_fit.model.utils import safe_run_r_estimator, r, get_coord, \
@@ -16,10 +17,12 @@ from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoo
 class ParametricMarginModel(AbstractMarginModel, ABC):
 
     def __init__(self, coordinates: AbstractCoordinates, use_start_value=False, params_start_fit=None,
-                 params_sample=None, starting_point=None, params_class=GevParams):
+                 params_sample=None, starting_point=None, params_class=GevParams,
+                 fit_method=MarginFitMethod.spatial_extremes_mle):
         """
         :param starting_point: starting coordinate for the temporal trend
         """
+        self.fit_method = fit_method
         self.starting_point = starting_point
         self.margin_function_sample = None  # type: ParametricMarginFunction
         self.margin_function_start_fit = None  # type: ParametricMarginFunction
@@ -29,10 +32,12 @@ class ParametricMarginModel(AbstractMarginModel, ABC):
     def fitmargin_from_maxima_gev(self, data: np.ndarray, df_coordinates_spat: pd.DataFrame,
                                   df_coordinates_temp: pd.DataFrame) -> ResultFromSpatialExtreme:
         assert data.shape[1] == len(df_coordinates_spat)
+        if self.fit_method == MarginFitMethod.spatial_extremes_mle:
+            return self.fit_from_spatial_extremes(data, df_coordinates_spat, df_coordinates_temp)
+        else:
+            raise NotImplementedError
 
-        return self.fit_from_statial_extremes(data, df_coordinates_spat, df_coordinates_temp)
-
-    def fit_from_statial_extremes(self, data, df_coordinates_spat, df_coordinates_temp):
+    def fit_from_spatial_extremes(self, data, df_coordinates_spat, df_coordinates_temp):
         # Margin formula for fitspatgev
         fit_params = get_margin_formula_spatial_extreme(self.margin_function_start_fit.form_dict)
         # Covariables
diff --git a/extreme_fit/model/margin_model/utils.py b/extreme_fit/model/margin_model/utils.py
index 9576316238ed1fcb4cec07f4af264dffc5cf147b..0985928a7c51251dc05001cd6ed863109df8eda5 100644
--- a/extreme_fit/model/margin_model/utils.py
+++ b/extreme_fit/model/margin_model/utils.py
@@ -7,6 +7,7 @@ class MarginFitMethod(Enum):
     extremes_fevd_mle = 2
     extremes_fevd_gmle = 3
     extremes_fevd_l_moments = 4
+    spatial_extremes_mle = 5
 
 
 FEVD_MARGIN_FIT_METHOD_TO_ARGUMENT_STR = {
diff --git a/projects/contrasting_trends_in_snow_loads/altitunal_fit/altitudes_studies.py b/projects/contrasting_trends_in_snow_loads/altitudes_fit/altitudes_studies.py
similarity index 100%
rename from projects/contrasting_trends_in_snow_loads/altitunal_fit/altitudes_studies.py
rename to projects/contrasting_trends_in_snow_loads/altitudes_fit/altitudes_studies.py
diff --git a/projects/contrasting_trends_in_snow_loads/altitunal_fit/two_fold_estimation.py b/projects/contrasting_trends_in_snow_loads/altitudes_fit/two_fold_datasets_generator.py
similarity index 75%
rename from projects/contrasting_trends_in_snow_loads/altitunal_fit/two_fold_estimation.py
rename to projects/contrasting_trends_in_snow_loads/altitudes_fit/two_fold_datasets_generator.py
index 05b1ddf96096d3ce637f1274ab03416382e98488..d8b698424377fae66e3fc5109d99757759668940 100644
--- a/projects/contrasting_trends_in_snow_loads/altitunal_fit/two_fold_estimation.py
+++ b/projects/contrasting_trends_in_snow_loads/altitudes_fit/two_fold_datasets_generator.py
@@ -1,20 +1,26 @@
 from typing import Tuple, Dict, List
 
-from projects.contrasting_trends_in_snow_loads.altitunal_fit.altitudes_studies import AltitudesStudies
+from cached_property import cached_property
+
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.altitudes_studies import AltitudesStudies
 from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
 from spatio_temporal_dataset.slicer.split import invert_s_split
 
 
-class TwoFoldEstimation(object):
+class TwoFoldDatasetsGenerator(object):
 
-    def __init__(self, studies: AltitudesStudies, nb_samples):
+    def __init__(self, studies: AltitudesStudies, nb_samples, massif_names=None):
         self.studies = studies
         self.nb_samples = nb_samples
+        if massif_names is None:
+            self.massif_names = self.studies.study.all_massif_names()
+        else:
+            self.massif_names = massif_names
 
-    @property
+    @cached_property
     def massif_name_to_list_two_fold_datasets(self) -> Dict[str, List[Tuple[AbstractDataset, AbstractDataset]]]:
         d = {}
-        for massif_name in self.studies.study.all_massif_names():
+        for massif_name in self.massif_names:
             l = []
             for _ in range(self.nb_samples):
                 # Append to the list
diff --git a/projects/contrasting_trends_in_snow_loads/altitudes_fit/two_fold_detail_fit.py b/projects/contrasting_trends_in_snow_loads/altitudes_fit/two_fold_detail_fit.py
new file mode 100644
index 0000000000000000000000000000000000000000..d58f1d4709aa008bfc314a54805f4a52696e201e
--- /dev/null
+++ b/projects/contrasting_trends_in_snow_loads/altitudes_fit/two_fold_detail_fit.py
@@ -0,0 +1,84 @@
+from typing import List
+
+import numpy as np
+
+from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator
+from extreme_fit.estimator.margin_estimator.utils import fitted_linear_margin_estimator_short
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.utils import get_key_with_min_value, Score, Grouping
+from spatio_temporal_dataset.slicer.split import Split
+
+
+class TwoFoldMassifFit(object):
+
+    def __init__(self, model_classes, list_two_fold_datasets, **kwargs):
+        self.model_classes = model_classes
+        self.sample_id_to_sample_fit = {
+            sample_id: TwoFoldSampleFit(model_classes, two_fold_datasets=two_fold_datasets, **kwargs)
+            for sample_id, two_fold_datasets in enumerate(list_two_fold_datasets)
+        }
+
+    def best_model(self, score, group):
+        if group is Grouping.MEAN_RANKING:
+            return get_key_with_min_value(self.model_class_to_mean_ranking(score))
+        else:
+            raise NotImplementedError
+
+    def sample_id_to_ordered_model(self, score):
+        return {
+            sample_id: sample_fit.ordered_model_classes(score)
+            for sample_id, sample_fit in self.sample_id_to_sample_fit.items()
+        }
+
+    def model_class_to_scores(self):
+        pass
+
+    def model_class_to_rankings(self, score):
+        model_class_to_ranks = {model_class: [] for model_class in self.model_classes}
+        for ordered_model in self.sample_id_to_ordered_model(score=score).values():
+            for rank, model_class in enumerate(ordered_model):
+                model_class_to_ranks[model_class].append(rank)
+        return model_class_to_ranks
+
+    def model_class_to_mean_ranking(self, score):
+        return {
+            model_class: np.mean(ranks)
+            for model_class, ranks in self.model_class_to_rankings(score).items()
+        }
+
+
+class TwoFoldSampleFit(object):
+
+    def __init__(self, model_classes, **kwargs):
+        self.model_classes = model_classes
+        self.model_class_to_model_fit = {
+            model_class: TwoFoldModelFit(model_class, **kwargs) for model_class in self.model_classes
+        }
+
+    def ordered_model_classes(self, score):
+        # Always ordered from the lowest score to the highest score.
+        key = lambda model_class: self.model_class_to_model_fit[model_class].score(score)
+        return sorted(self.model_classes, key=key)
+
+
+class TwoFoldModelFit(object):
+
+    def __init__(self, model_class, two_fold_datasets, fit_method):
+        self.model_class = model_class
+        self.fit_method = fit_method
+        self.estimator_fold_1, self.estimator_fold_2 = [
+            fitted_linear_margin_estimator_short(model_class=self.model_class, dataset=dataset,
+                                                 fit_method=self.fit_method) for dataset in two_fold_datasets]
+
+    @property
+    def estimators(self) -> List[LinearMarginEstimator]:
+        return [self.estimator_fold_1, self.estimator_fold_2]
+
+    def score(self, score):
+        if score == Score.NLLH_TEST:
+            return self.nllh_test_temporal
+        else:
+            raise NotImplementedError
+
+    @property
+    def nllh_test_temporal(self):
+        return sum([e.nllh(split=Split.test_temporal) for e in self.estimators])
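
`TwoFoldMassifFit.best_model` with `Grouping.MEAN_RANKING` averages, for each model class, its rank across the per-sample orderings and keeps the smallest mean. A toy sketch of that aggregation, with strings standing in for model classes:

```python
import numpy as np

sample_id_to_ordered_model = {0: ['Stationary', 'Linear'],
                              1: ['Stationary', 'Linear'],
                              2: ['Linear', 'Stationary']}
model_class_to_ranks = {'Stationary': [], 'Linear': []}
for ordered_model in sample_id_to_ordered_model.values():
    for rank, model_class in enumerate(ordered_model):
        model_class_to_ranks[model_class].append(rank)
mean_ranking = {m: np.mean(ranks) for m, ranks in model_class_to_ranks.items()}
print(mean_ranking)  # {'Stationary': 0.33..., 'Linear': 0.67...} -> 'Stationary' is selected
```
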
diff --git a/projects/contrasting_trends_in_snow_loads/altitudes_fit/two_fold_fit.py b/projects/contrasting_trends_in_snow_loads/altitudes_fit/two_fold_fit.py
new file mode 100644
index 0000000000000000000000000000000000000000..49569ee17090227547e75178e49a468bcfa20c62
--- /dev/null
+++ b/projects/contrasting_trends_in_snow_loads/altitudes_fit/two_fold_fit.py
@@ -0,0 +1,45 @@
+from enum import Enum
+from typing import Dict, List
+
+from cached_property import cached_property
+
+from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator
+from extreme_fit.estimator.margin_estimator.utils import fitted_linear_margin_estimator, \
+    fitted_linear_margin_estimator_short
+from extreme_fit.model.margin_model.abstract_margin_model import AbstractMarginModel
+from extreme_fit.model.margin_model.utils import \
+    MarginFitMethod
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.two_fold_datasets_generator import TwoFoldDatasetsGenerator
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.two_fold_detail_fit import TwoFoldMassifFit
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.utils import Score, Grouping
+from spatio_temporal_dataset.slicer.split import Split
+
+
+class TwoFoldFit(object):
+
+    def __init__(self, two_fold_datasets_generator: TwoFoldDatasetsGenerator,
+                 model_family_name_to_model_classes: Dict[str, List[type]],
+                 fit_method=MarginFitMethod.extremes_fevd_mle,
+                 ):
+        self.two_fold_datasets_generator = two_fold_datasets_generator
+        self.fit_method = fit_method
+        self.model_family_name_to_model_classes = model_family_name_to_model_classes
+
+        self.massif_name_to_massif_fit = {}
+        for massif_name, list_two_fold_datasets in self.two_fold_datasets_generator.massif_name_to_list_two_fold_datasets.items():
+            self.massif_name_to_massif_fit[massif_name] = TwoFoldMassifFit(model_classes=self.model_classes_to_fit,
+                                                                           list_two_fold_datasets=list_two_fold_datasets,
+                                                                           fit_method=self.fit_method)
+
+    @cached_property
+    def model_classes_to_fit(self):
+        return set().union(*[set(model_classes) for model_classes in self.model_family_name_to_model_classes.values()])
+
+    def model_family_name_to_best_model(self, score):
+        pass
+
+    def massif_name_to_best_model(self, score=Score.NLLH_TEST, group=Grouping.MEAN_RANKING):
+        return {
+            massif_name: massif_fit.best_model(score, group)
+            for massif_name, massif_fit in self.massif_name_to_massif_fit.items()
+        }
diff --git a/projects/contrasting_trends_in_snow_loads/altitudes_fit/utils.py b/projects/contrasting_trends_in_snow_loads/altitudes_fit/utils.py
new file mode 100644
index 0000000000000000000000000000000000000000..86372c5c76347c248aa8b9523de181b2397879ee
--- /dev/null
+++ b/projects/contrasting_trends_in_snow_loads/altitudes_fit/utils.py
@@ -0,0 +1,18 @@
+import operator
+from enum import Enum
+
+
+class Score(Enum):
+    NLLH_TEST = 1
+
+
+class Grouping(Enum):
+    MEAN_RANKING = 1
+
+
+def get_key_with_max_value(d):
+    return max(d.items(), key=operator.itemgetter(1))[0]
+
+
+def get_key_with_min_value(d):
+    return min(d.items(), key=operator.itemgetter(1))[0]
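
The two helpers are the usual arg-max/arg-min over a dict; for example, applied to a mean ranking like the one above:

```python
mean_ranking = {'Stationary': 0.33, 'Linear': 0.67}
assert get_key_with_min_value(mean_ranking) == 'Stationary'
assert get_key_with_max_value(mean_ranking) == 'Linear'
```
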
diff --git a/spatio_temporal_dataset/coordinates/abstract_coordinates.py b/spatio_temporal_dataset/coordinates/abstract_coordinates.py
index 7fde3c4ab682c25892b096bed94180167c89b9ed..10b5e563f3aa344f924e9a88f1671fe9e19a56e4 100644
--- a/spatio_temporal_dataset/coordinates/abstract_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/abstract_coordinates.py
@@ -216,7 +216,6 @@ class AbstractCoordinates(object):
         else:
             return self.df_coordinates(split, transformed).loc[:, self.spatial_coordinates_names].drop_duplicates()
 
-    @property
     def nb_stations(self, split: Split = Split.all) -> int:
         return len(self.df_spatial_coordinates(split))
 
@@ -292,7 +291,6 @@ class AbstractCoordinates(object):
     def temporal_coordinates(self):
         raise NotImplementedError
 
-    @property
     def nb_steps(self, split: Split = Split.all) -> int:
         return len(self.df_temporal_coordinates(split))
 
diff --git a/spatio_temporal_dataset/dataset/abstract_dataset.py b/spatio_temporal_dataset/dataset/abstract_dataset.py
index 91add0b4b0801a531c2fdee42189435ea8d9f2e5..0fbe11974a85ebd3f8fd2f179885c04e59cd3429 100644
--- a/spatio_temporal_dataset/dataset/abstract_dataset.py
+++ b/spatio_temporal_dataset/dataset/abstract_dataset.py
@@ -57,8 +57,8 @@ class AbstractDataset(object):
         array = maxima_function(split)
         if self.coordinates.has_spatio_temporal_coordinates:
             nb_obs = self.observations.nb_obs
-            nb_stations = self.coordinates.nb_stations
-            nb_steps = self.coordinates.nb_steps
+            nb_stations = self.coordinates.nb_stations(split)
+            nb_steps = self.coordinates.nb_steps(split)
             # Permute array lines
             time_steps = np.array(range(nb_steps))
             c = [time_steps * nb_stations + i for i in range(nb_stations)]
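
The only change here is that the station/step counts are now taken per split. For reference, the surrounding index construction groups, for each station, the flat row indices of its maxima across time steps; a toy run (assuming the flat spatio-temporal index orders rows with stations varying fastest):

```python
import numpy as np

nb_stations, nb_steps = 3, 4
time_steps = np.array(range(nb_steps))
c = [time_steps * nb_stations + i for i in range(nb_stations)]
print(c)
# [array([0, 3, 6, 9]), array([1, 4, 7, 10]), array([2, 5, 8, 11])]
# i.e. station 0 occupies rows 0, 3, 6, 9; station 1 rows 1, 4, 7, 10; etc.
```
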
diff --git a/spatio_temporal_dataset/spatio_temporal_observations/annual_maxima_observations.py b/spatio_temporal_dataset/spatio_temporal_observations/annual_maxima_observations.py
index 9c59937e96fbc4ef442dbdd92819d9d2cdaf4c19..c8a889b7ad3f74f64710db04c1498a6ca8b41ad5 100644
--- a/spatio_temporal_dataset/spatio_temporal_observations/annual_maxima_observations.py
+++ b/spatio_temporal_dataset/spatio_temporal_observations/annual_maxima_observations.py
@@ -87,7 +87,7 @@ class FullSpatioTemporalAnnualMaxima(MaxStableAnnualMaxima):
                              coordinates: AbstractSpatioTemporalCoordinates, margin_model: AbstractMarginModel):
         # Sample with the max stable spatially
         spatial_coordinate = coordinates.spatial_coordinates
-        nb_total_obs = nb_obs * coordinates.nb_steps
+        nb_total_obs = nb_obs * coordinates.nb_steps()
         max_stable_annual_maxima = super().from_sampling(nb_total_obs, max_stable_model, spatial_coordinate)
         # Convert observation to a spatio temporal index
         max_stable_annual_maxima.convert_to_spatio_temporal_index(coordinates)
diff --git a/spatio_temporal_dataset/spatio_temporal_observations/daily_observations.py b/spatio_temporal_dataset/spatio_temporal_observations/daily_observations.py
index e89b3bad95654cabfc6a0a5fc06f486b5c7c3bb4..88bca20bb652832c71f7f100bbff25c718b59250 100644
--- a/spatio_temporal_dataset/spatio_temporal_observations/daily_observations.py
+++ b/spatio_temporal_dataset/spatio_temporal_observations/daily_observations.py
@@ -13,7 +13,7 @@ class DailyObservations(AbstractSpatioTemporalObservations):
     def transform_to_standard_shape(self, coordinates: AbstractTemporalCoordinates):
         assert isinstance(coordinates, AbstractTemporalCoordinates)
         df_coordinates = pd.concat([coordinates.df_all_coordinates for _ in range(self.nb_obs)])
-        df_coordinates.index = pd.Index(range(self.nb_obs * coordinates.nb_steps))
+        df_coordinates.index = pd.Index(range(self.nb_obs * coordinates.nb_steps()))
         coordinates = AbstractTemporalCoordinates.from_df(df_coordinates, train_split_ratio=None,
                                                           transformation_class=coordinates.transformation_class)
         df = pd.DataFrame(pd.concat([self.df_maxima_gev[c] for c in self.columns]))
diff --git a/test/test_extreme_fit/test_distribution/test_gev/test_gev_temporal_extremes_mle.py b/test/test_extreme_fit/test_distribution/test_gev/test_gev_temporal_extremes_mle.py
index e237f091120c7077a3d446afe53bc05a68c31736..6066ff4a2bdd44fe1adf10c05d685205c4f6c9ee 100644
--- a/test/test_extreme_fit/test_distribution/test_gev/test_gev_temporal_extremes_mle.py
+++ b/test/test_extreme_fit/test_distribution/test_gev/test_gev_temporal_extremes_mle.py
@@ -46,7 +46,7 @@ class TestGevTemporalExtremesMle(unittest.TestCase):
             mle_params_estimated = estimator.function_from_fit.get_gev_params(np.array([year])).to_dict()
             for key in ref.keys():
                 self.assertAlmostEqual(ref[key], mle_params_estimated[key], places=3)
-            self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+            self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
 
     def test_gev_temporal_margin_fit_non_stationary_location(self):
         # Create estimator
@@ -57,7 +57,7 @@ class TestGevTemporalExtremesMle(unittest.TestCase):
         mle_params_estimated_year1 = estimator.function_from_fit.get_gev_params(np.array([1])).to_dict()
         mle_params_estimated_year3 = estimator.function_from_fit.get_gev_params(np.array([3])).to_dict()
         self.assertNotEqual(mle_params_estimated_year1, mle_params_estimated_year3)
-        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
 
     def test_gev_temporal_margin_fit_non_stationary_location_and_scale(self):
         # Create estimator
@@ -69,7 +69,7 @@ class TestGevTemporalExtremesMle(unittest.TestCase):
         mle_params_estimated_year1 = estimator.function_from_fit.get_gev_params(np.array([1])).to_dict()
         mle_params_estimated_year3 = estimator.function_from_fit.get_gev_params(np.array([3])).to_dict()
         self.assertNotEqual(mle_params_estimated_year1, mle_params_estimated_year3)
-        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
 
 
 if __name__ == '__main__':
diff --git a/test/test_projects/test_contrasting/test_altitudes_studies.py b/test/test_projects/test_contrasting/test_altitudes_studies.py
index ced0f89fe7cbfd7b031d26eb33f08340641c2e8a..ed3cb272c0b7146b5e3b602518586ed325b4a7c2 100644
--- a/test/test_projects/test_contrasting/test_altitudes_studies.py
+++ b/test/test_projects/test_contrasting/test_altitudes_studies.py
@@ -1,7 +1,7 @@
 import unittest
 
 from extreme_data.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall1Day
-from projects.contrasting_trends_in_snow_loads.altitunal_fit.altitudes_studies import AltitudesStudies
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.altitudes_studies import AltitudesStudies
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 from spatio_temporal_dataset.slicer.split import Split, small_s_split_from_ratio
 from spatio_temporal_dataset.slicer.temporal_slicer import TemporalSlicer
diff --git a/test/test_projects/test_contrasting/test_two_fold_estimation.py b/test/test_projects/test_contrasting/test_two_fold_datasets_generator.py
similarity index 82%
rename from test/test_projects/test_contrasting/test_two_fold_estimation.py
rename to test/test_projects/test_contrasting/test_two_fold_datasets_generator.py
index 808e64469df72775256a0a180e988a4376213cd8..48c61a8a6bf735d3fd5200b15e410d3a8bb93f33 100644
--- a/test/test_projects/test_contrasting/test_two_fold_estimation.py
+++ b/test/test_projects/test_contrasting/test_two_fold_datasets_generator.py
@@ -2,19 +2,19 @@ import unittest
 import numpy as np
 
 from extreme_data.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall1Day
-from projects.contrasting_trends_in_snow_loads.altitunal_fit.altitudes_studies import AltitudesStudies
-from projects.contrasting_trends_in_snow_loads.altitunal_fit.two_fold_estimation import TwoFoldEstimation
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.altitudes_studies import AltitudesStudies
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.two_fold_datasets_generator import TwoFoldDatasetsGenerator
 from spatio_temporal_dataset.slicer.split import Split
 
 
-class TestAltitudesStudies(unittest.TestCase):
+class TestTwoFoldDatasetsGenerator(unittest.TestCase):
 
     def setUp(self) -> None:
         super().setUp()
         altitudes = [900, 1200]
         study_class = SafranSnowfall1Day
         studies = AltitudesStudies(study_class, altitudes, year_min=1959, year_max=1963)
-        self.two_fold_estimation = TwoFoldEstimation(studies, nb_samples=2)
+        self.two_fold_estimation = TwoFoldDatasetsGenerator(studies, nb_samples=2)
 
     def test_dataset_sizes(self):
         dataset1, dataset2 = self.two_fold_estimation.two_fold_datasets('Vercors')
diff --git a/test/test_projects/test_contrasting/test_two_fold_fit.py b/test/test_projects/test_contrasting/test_two_fold_fit.py
new file mode 100644
index 0000000000000000000000000000000000000000..023816ea4265f80b66f558fad36c50d5aa3cc53f
--- /dev/null
+++ b/test/test_projects/test_contrasting/test_two_fold_fit.py
@@ -0,0 +1,40 @@
+import unittest
+
+import numpy as np
+
+from extreme_data.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall1Day
+from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import ConstantMarginModel, \
+    LinearLocationAllDimsMarginModel
+from extreme_fit.model.margin_model.utils import MarginFitMethod
+from extreme_fit.model.utils import set_seed_for_test
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.altitudes_studies import AltitudesStudies
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.two_fold_datasets_generator import TwoFoldDatasetsGenerator
+from projects.contrasting_trends_in_snow_loads.altitudes_fit.two_fold_fit import TwoFoldFit
+from spatio_temporal_dataset.slicer.split import Split
+
+
+class TestTwoFoldFit(unittest.TestCase):
+
+    def setUp(self) -> None:
+        super().setUp()
+        set_seed_for_test()
+        altitudes = [900, 1200]
+        study_class = SafranSnowfall1Day
+        studies = AltitudesStudies(study_class, altitudes, year_min=1959, year_max=1963)
+        self.two_fold_datasets_generator = TwoFoldDatasetsGenerator(studies, nb_samples=2, massif_names=['Vercors'])
+        self.model_family_name_to_model_class = {'Stationary': [ConstantMarginModel],
+                                                 'Linear': [ConstantMarginModel, LinearLocationAllDimsMarginModel]}
+
+    def load_two_fold_fit(self, fit_method):
+        return TwoFoldFit(two_fold_datasets_generator=self.two_fold_datasets_generator,
+                          model_family_name_to_model_classes=self.model_family_name_to_model_class,
+                          fit_method=fit_method)
+
+    def test_best_fit_spatial_extreme(self):
+        two_fold_fit = self.load_two_fold_fit(fit_method=MarginFitMethod.spatial_extremes_mle)
+        best_model_class = two_fold_fit.massif_name_to_best_model()['Vercors']
+        self.assertEqual(best_model_class, ConstantMarginModel)
+
+
+if __name__ == '__main__':
+    unittest.main()