diff --git a/extreme_data/meteo_france_data/scm_models_data/altitudes_studies.py b/extreme_data/meteo_france_data/scm_models_data/altitudes_studies.py
index 730fa5eab8e83cea3dd99c7273ccf8063fb0f5ac..8cba3375a89ade55900301255a7ae151f90334ff 100644
--- a/extreme_data/meteo_france_data/scm_models_data/altitudes_studies.py
+++ b/extreme_data/meteo_france_data/scm_models_data/altitudes_studies.py
@@ -23,7 +23,6 @@ from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_
 from spatio_temporal_dataset.coordinates.temporal_coordinates.generated_temporal_coordinates import \
     ConsecutiveTemporalCoordinates
 from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
-from spatio_temporal_dataset.slicer.utils import get_slicer_class_from_s_splits
 from spatio_temporal_dataset.spatio_temporal_observations.annual_maxima_observations import AnnualMaxima
 import matplotlib.pyplot as plt
 
@@ -92,22 +91,15 @@ class AltitudesStudies(object):
         else:
             assert len(massif_altitudes) > 0
             spatial_coordinates = self.spatial_coordinates_for_altitudes(massif_altitudes)
-        slicer_class = get_slicer_class_from_s_splits(s_split_spatial, s_split_temporal)
         if isinstance(self.study, AbstractAdamontStudy):
-            return SpatioTemporalCoordinatesForClimateModels(slicer_class=slicer_class,
-                                                             s_split_spatial=s_split_spatial,
-                                                             s_split_temporal=s_split_temporal,
-                                                             transformation_class=self.spatial_transformation_class,
+            return SpatioTemporalCoordinatesForClimateModels(transformation_class=self.spatial_transformation_class,
                                                              spatial_coordinates=spatial_coordinates,
                                                              temporal_coordinates=self.temporal_coordinates,
                                                              gcm_rcm_couple=self.study.gcm_rcm_couple,
                                                              scenario_str=scenario_to_str(self.study.scenario),
                                                              )
         else:
-            return AbstractSpatioTemporalCoordinates(slicer_class=slicer_class,
-                                                     s_split_spatial=s_split_spatial,
-                                                     s_split_temporal=s_split_temporal,
-                                                     transformation_class=self.spatial_transformation_class,
+            return AbstractSpatioTemporalCoordinates(transformation_class=self.spatial_transformation_class,
                                                      spatial_coordinates=spatial_coordinates,
                                                      temporal_coordinates=self.temporal_coordinates)
 
@@ -130,12 +122,6 @@ class AltitudesStudies(object):
         return AbstractSpatioTemporalCoordinates.get_df_from_spatial_and_temporal_coordinates(self.spatial_coordinates,
                                                                                               self.temporal_coordinates)
 
-    def random_s_split_spatial(self, train_split_ratio):
-        return AbstractCoordinates.spatial_s_split_from_df(self._df_coordinates, train_split_ratio)
-
-    def random_s_split_temporal(self, train_split_ratio):
-        return AbstractCoordinates.temporal_s_split_from_df(self._df_coordinates, train_split_ratio)
-
     # Some visualization
 
     def show_or_save_to_file(self, plot_name, show=False, no_title=False, tight_layout=None):
diff --git a/extreme_fit/estimator/abstract_estimator.py b/extreme_fit/estimator/abstract_estimator.py
index 98685cf3a3067f1b07e1492eb9db5d5cb08456ed..68c3bbf9b3f5fc8b291d3a5a8d772aeb8516dcd6 100644
--- a/extreme_fit/estimator/abstract_estimator.py
+++ b/extreme_fit/estimator/abstract_estimator.py
@@ -39,12 +39,6 @@ class AbstractEstimator(object):
     def function_from_fit(self) -> AbstractFunction:
         raise NotImplementedError
 
-    # Short cut properties
-
-    @property
-    def train_split(self):
-        return self.dataset.slicer.train_split
-
 
 
 
diff --git a/extreme_fit/estimator/full_estimator/abstract_full_estimator.py b/extreme_fit/estimator/full_estimator/abstract_full_estimator.py
index 022bb4d3947fae1785989f14a26de0400db43421..be66fb1b9a70cb23608c9bb92cc0fa9e62d43d8c 100644
--- a/extreme_fit/estimator/full_estimator/abstract_full_estimator.py
+++ b/extreme_fit/estimator/full_estimator/abstract_full_estimator.py
@@ -30,13 +30,12 @@ class SmoothMarginalsThenUnitaryMsp(AbstractFullEstimator):
         # Estimate the margin parameters
         self.margin_estimator.fit()
         # Compute the maxima_frech
-        maxima_gev_train = self.dataset.maxima_gev(split=self.train_split)
+        maxima_gev_train = self.dataset.maxima_gev
         maxima_frech = AbstractMarginModel.gev2frech(maxima_gev=maxima_gev_train,
-                                                     coordinates_values=self.dataset.coordinates_values(
-                                                         self.train_split),
+                                                     coordinates_values=self.dataset.coordinates_values(),
                                                      margin_function=self.margin_estimator.function_from_fit)
         # Update maxima frech field through the dataset object
-        self.dataset.set_maxima_frech(maxima_frech, split=self.train_split)
+        self.dataset.set_maxima_frech(maxima_frech)
         # Estimate the max stable parameters
         self.max_stable_estimator.fit()
 
@@ -67,17 +66,16 @@ class FullEstimatorInASingleStepWithSmoothMargin(AbstractFullEstimator):
 
     @property
     def df_coordinates_spat(self):
-        return self.dataset.coordinates.df_spatial_coordinates(self.train_split)
+        return self.dataset.coordinates.df_spatial_coordinates()
 
     @property
     def df_coordinates_temp(self):
-        return self.dataset.coordinates.df_temporal_coordinates_for_fit(split=self.train_split,
-                                                                        starting_point=self.linear_margin_model.starting_point)
+        return self.dataset.coordinates.df_temporal_coordinates_for_fit(starting_point=self.linear_margin_model.starting_point)
 
     def _fit(self):
         # Estimate both the margin and the max-stable structure
         return self.max_stable_model.fitmaxstab(
-            data_gev=self.dataset.maxima_gev_for_spatial_extremes_package(self.train_split),
+            data_gev=self.dataset.maxima_gev_for_spatial_extremes_package,
             df_coordinates_spat=self.df_coordinates_spat,
             df_coordinates_temp=self.df_coordinates_temp,
             fit_marge=True,
diff --git a/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py b/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py
index a721ede3659fd47f9549cc1b51636cde6cac8ab8..97a64880c93cfc371fee1d54050712d4f0b8b548 100644
--- a/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py
+++ b/extreme_fit/estimator/margin_estimator/abstract_margin_estimator.py
@@ -14,14 +14,13 @@ from extreme_fit.function.margin_function.linear_margin_function import LinearMa
 from extreme_fit.model.margin_model.utils import MarginFitMethod
 from extreme_fit.model.result_from_model_fit.abstract_result_from_model_fit import AbstractResultFromModelFit
 from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
-from spatio_temporal_dataset.slicer.split import Split
-
+
 
 class AbstractMarginEstimator(AbstractEstimator, ABC):
 
     def __init__(self, dataset: AbstractDataset, **kwargs):
         super().__init__(dataset, **kwargs)
-        assert self.dataset.maxima_gev() is not None
+        assert self.dataset.maxima_gev is not None
 
 
 class LinearMarginEstimator(AbstractMarginEstimator):
@@ -33,49 +31,52 @@ class LinearMarginEstimator(AbstractMarginEstimator):
         self.margin_model = margin_model
 
     def _fit(self) -> AbstractResultFromModelFit:
-        data = self.data(self.train_split)
-        df_coordinate_temp = self.df_coordinates_temp(self.train_split)
-        df_coordinate_spat = self.df_coordinates_spat(self.train_split)
-        return self.margin_model.fitmargin_from_maxima_gev(data=data,
-                                                           df_coordinates_spat=df_coordinate_spat,
-                                                           df_coordinates_temp=df_coordinate_temp)
+        return self.margin_model.fitmargin_from_maxima_gev(data=self.data,
+                                                           df_coordinates_spat=self.df_coordinates_spat,
+                                                           df_coordinates_temp=self.df_coordinates_temp)
 
-    def data(self, split):
-        return self._maxima_gev(split)
+    @property
+    def data(self):
+        return self._maxima_gev
 
-    def _maxima_gev(self, split):
+    @property
+    def _maxima_gev(self):
         if self.margin_model.fit_method == MarginFitMethod.spatial_extremes_mle:
-            return self.dataset.maxima_gev_for_spatial_extremes_package(split)
+            return self.dataset.maxima_gev_for_spatial_extremes_package
         else:
-            return self.dataset.maxima_gev(split)
+            return self.dataset.maxima_gev
 
-    def df_coordinates_spat(self, split):
-        return self.dataset.coordinates.df_spatial_coordinates(split=split,
-                                                               drop_duplicates=self.margin_model.drop_duplicates)
+    @property
+    def df_coordinates_spat(self):
+        return self.dataset.coordinates.df_spatial_coordinates(drop_duplicates=self.margin_model.drop_duplicates)
 
-    def df_coordinates_temp(self, split):
-        return self.dataset.coordinates.df_temporal_coordinates_for_fit(split=split,
-                                                                        temporal_covariate_for_fit=self.margin_model.temporal_covariate_for_fit,
-                                                                        starting_point=self.margin_model.starting_point,
-                                                                        drop_duplicates=self.margin_model.drop_duplicates,
-                                                                        climate_coordinates_with_effects=self.margin_model.climate_coordinates_with_effects)
+    @property
+    def df_coordinates_temp(self):
+        return self.dataset.coordinates.df_temporal_coordinates_for_fit(
+            temporal_covariate_for_fit=self.margin_model.temporal_covariate_for_fit,
+            starting_point=self.margin_model.starting_point,
+            drop_duplicates=self.margin_model.drop_duplicates,
+            climate_coordinates_with_effects=self.margin_model.climate_coordinates_with_effects)
 
     @cached_property
     def function_from_fit(self) -> LinearMarginFunction:
         return load_margin_function(self, self.margin_model)
 
-    def coordinates_for_nllh(self, split=Split.all):
-        return pd.concat([self.df_coordinates_spat(split=split), self.df_coordinates_temp(split=split)], axis=1).values
+    @property
+    def coordinates_for_nllh(self):
+        return pd.concat([self.df_coordinates_spat, self.df_coordinates_temp], axis=1).values
 
-    def nllh(self, split=Split.all):
-        maxima_values = self.dataset.maxima_gev(split=split)
-        coordinate_values = self.coordinates_for_nllh(split=split)
+    @property
+    def nllh(self):
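+        # Negative log-likelihood of the fitted margin function, evaluated on all maxima in the dataset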
+        maxima_values = self.dataset.maxima_gev
+        coordinate_values = self.coordinates_for_nllh
         return compute_nllh(coordinate_values, maxima_values, self.function_from_fit)
 
-    def sorted_empirical_standard_gumbel_quantiles(self, split=Split.all, coordinate_for_filter=None):
+    def sorted_empirical_standard_gumbel_quantiles(self, coordinate_for_filter=None):
         sorted_empirical_quantiles = []
-        maxima_values = self.dataset.maxima_gev(split=split)
-        coordinate_values = self.dataset.df_coordinates(split=split).values
+        maxima_values = self.dataset.maxima_gev
+        coordinate_values = self.dataset.df_coordinates.values
         for maximum, coordinate in zip(maxima_values, coordinate_values):
             if coordinate_for_filter is not None:
                 assert len(coordinate) == len(coordinate_for_filter)
@@ -90,9 +90,9 @@ class LinearMarginEstimator(AbstractMarginEstimator):
         sorted_empirical_quantiles = sorted(sorted_empirical_quantiles)
         return sorted_empirical_quantiles
 
-    def coordinate_values_to_maxima_from_standard_gumbel_quantiles(self, standard_gumbel_quantiles, split=Split.all):
+    def coordinate_values_to_maxima_from_standard_gumbel_quantiles(self, standard_gumbel_quantiles):
         coordinate_values_to_maxima = {}
-        coordinate_values = self.dataset.df_coordinates(split=split).values
+        coordinate_values = self.dataset.df_coordinates.values
         assert len(standard_gumbel_quantiles) == len(coordinate_values)
         for quantile, coordinate in zip(standard_gumbel_quantiles, coordinate_values):
             gev_param = self.function_from_fit.get_params(
@@ -102,16 +102,19 @@ class LinearMarginEstimator(AbstractMarginEstimator):
             coordinate_values_to_maxima[tuple(coordinate)] = np.array([maximum])
         return coordinate_values_to_maxima
 
-    def deviance(self, split=Split.all):
-        return 2 * self.nllh(split=split)
+    @property
+    def deviance(self):
+        return 2 * self.nllh
 
-    def aic(self, split=Split.all):
-        aic = 2 * self.nb_params + 2 * self.nllh(split=split)
+    @property
+    def aic(self):
+        aic = 2 * self.nb_params + 2 * self.nllh
         npt.assert_almost_equal(self.result_from_model_fit.aic, aic, decimal=0)
         return aic
 
-    def n(self, split=Split.all):
-        return len(self.dataset.maxima_gev(split=split))
+    @property
+    def n(self):
+        return len(self.dataset.maxima_gev)
 
     @property
     def nb_params(self):
@@ -121,12 +124,16 @@ class LinearMarginEstimator(AbstractMarginEstimator):
             nb_params -= 1
         return nb_params
 
-    def bic(self, split=Split.all):
-        return np.log(self.n(split=split)) * self.nb_params + 2 * self.nllh(split=split)
+    @property
+    def bic(self):
+        return np.log(self.n) * self.nb_params + 2 * self.nllh
+
+    @property
+    def aicc(self):
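+        # Corrected AIC for small samples: AIC + 2*k*(k+1)/(n - k - 1), with k = nb_params and n = number of maxima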
+        additional_term = 2 * self.nb_params * (self.nb_params + 1) / (self.n - self.nb_params - 1)
+        return self.aic + additional_term
 
-    def aicc(self, split=Split.all):
-        additional_term = 2 * self.nb_params * (self.nb_params + 1) / (self.n() - self.nb_params - 1)
-        return self.aic(split) + additional_term
 
 def compute_nllh(coordinate_values, maxima_values, function_from_fit, maximum_from_obs=True, assertion_for_inf=True):
     nllh = 0
diff --git a/extreme_fit/estimator/max_stable_estimator/abstract_max_stable_estimator.py b/extreme_fit/estimator/max_stable_estimator/abstract_max_stable_estimator.py
index 837082fae13b03ae36ef5fadf80fa7cd83963312..1960057a572968aed2382277fadc74cd608a297d 100644
--- a/extreme_fit/estimator/max_stable_estimator/abstract_max_stable_estimator.py
+++ b/extreme_fit/estimator/max_stable_estimator/abstract_max_stable_estimator.py
@@ -15,13 +15,14 @@ class AbstractMaxStableEstimator(AbstractEstimator):
     def max_stable_params_fitted(self):
         raise NotImplementedError
 
+
 class MaxStableEstimator(AbstractMaxStableEstimator):
 
     def _fit(self):
-        assert self.dataset.maxima_frech(split=self.train_split) is not None
+        assert self.dataset.maxima_frech is not None
         return self.max_stable_model.fitmaxstab(
-            data_frech=self.dataset.maxima_frech_for_spatial_extremes_package(split=self.train_split),
-            df_coordinates_spat=self.dataset.df_coordinates(split=self.train_split))
+            data_frech=self.dataset.maxima_frech_for_spatial_extremes_package,
+            df_coordinates_spat=self.dataset.df_coordinates)
 
     @property
     def max_stable_params_fitted(self):
diff --git a/extreme_fit/function/margin_function/abstract_margin_function.py b/extreme_fit/function/margin_function/abstract_margin_function.py
index e3e70aa1e52efc1b8e2a8330b80bde9d574c9cdf..cd2e71c2013a5309ec41cd9866ef5e429bfccc5a 100644
--- a/extreme_fit/function/margin_function/abstract_margin_function.py
+++ b/extreme_fit/function/margin_function/abstract_margin_function.py
@@ -9,7 +9,6 @@ from extreme_fit.distribution.gev.gev_params import GevParams
 from extreme_data.meteo_france_data.scm_models_data.visualization.create_shifted_cmap import imshow_shifted
 from extreme_fit.function.abstract_function import AbstractFunction
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
-from spatio_temporal_dataset.slicer.split import Split
 from root_utils import cached_property
 
 
@@ -29,7 +28,6 @@ class AbstractMarginFunction(AbstractFunction):
         # Visualization parameters
         self.visualization_axes = None
         self.datapoint_display = False
-        self.spatio_temporal_split = Split.all
         self.datapoint_marker = 'o'
         self.color = 'skyblue'
         self.filter = None
@@ -75,11 +73,10 @@ class AbstractMarginFunction(AbstractFunction):
 
     # Visualization function
 
-    def set_datapoint_display_parameters(self, spatio_temporal_split=Split.all, datapoint_marker=None, filter=None,
+    def set_datapoint_display_parameters(self, datapoint_marker=None, filter=None,
                                          color=None,
                                          linewidth=1, datapoint_display=False):
         self.datapoint_display = datapoint_display
-        self.spatio_temporal_split = spatio_temporal_split
         self.datapoint_marker = datapoint_marker
         self.linewidth = linewidth
         self.filter = filter
@@ -143,13 +140,13 @@ class AbstractMarginFunction(AbstractFunction):
         # if self._grid_1D is None:
         #     self._grid_1D = self.get_grid_values_1D(x)
         # return self._grid_1D
-        return self.get_grid_values_1D(x, self.spatio_temporal_split)
+        return self.get_grid_values_1D(x)
 
-    def get_grid_values_1D(self, x, spatio_temporal_split):
+    def get_grid_values_1D(self, x):
         # TODO: to avoid getting the value several times, I could cache the results
         if self.datapoint_display:
             # todo: keep only the index of interest here
-            linspace = self.coordinates.coordinates_values(spatio_temporal_split)[:, 0]
+            linspace = self.coordinates.coordinates_values()[:, 0]
             if self.filter is not None:
                 linspace = linspace[self.filter]
             resolution = len(linspace)
diff --git a/extreme_trend/ensemble_fit/together_ensemble_fit/visualizer_non_stationary_ensemble.py b/extreme_trend/ensemble_fit/together_ensemble_fit/visualizer_non_stationary_ensemble.py
index fbe621fbae9506bd6d97ae69acce0da2d25ff8db..ec3a92dec17912783dfc4480ca0cde25bbb8cbbf 100644
--- a/extreme_trend/ensemble_fit/together_ensemble_fit/visualizer_non_stationary_ensemble.py
+++ b/extreme_trend/ensemble_fit/together_ensemble_fit/visualizer_non_stationary_ensemble.py
@@ -46,7 +46,6 @@ class VisualizerNonStationaryEnsemble(AltitudesStudiesVisualizerForNonStationary
         observations = AbstractSpatioTemporalObservations(df_maxima_gev=df_maxima_gev)
         df = pd.concat(df_coordinates_list, axis=0)
         df.index = index
-        coordinates = AbstractCoordinates(df=df,
-                                          slicer_class=type(dataset.slicer))
+        coordinates = AbstractCoordinates(df=df)
         dataset = AbstractDataset(observations=observations, coordinates=coordinates)
         return dataset
diff --git a/extreme_trend/one_fold_fit/one_fold_fit.py b/extreme_trend/one_fold_fit/one_fold_fit.py
index d9b6496881fc71ee6abf48ac84daae90fbc135c2..0ae4806a7a6f24eadf696a861b7db87aed182027 100644
--- a/extreme_trend/one_fold_fit/one_fold_fit.py
+++ b/extreme_trend/one_fold_fit/one_fold_fit.py
@@ -36,7 +36,6 @@ from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_
 from spatio_temporal_dataset.coordinates.temporal_coordinates.temperature_covariate import \
     AnomalyTemperatureWithSplineTemporalCovariate
 from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
-from spatio_temporal_dataset.slicer.split import Split
 from spatio_temporal_dataset.spatio_temporal_observations.annual_maxima_observations import AnnualMaxima
 
 
@@ -190,7 +189,7 @@ class OneFoldFit(object):
             # Remove models with undefined parameters for the coordinate of interest
             well_defined_estimators = []
             for e in estimators:
-                coordinate_values_for_the_fit = e.coordinates_for_nllh(Split.all)
+                coordinate_values_for_the_fit = e.coordinates_for_nllh
 
                 if isinstance(self.altitude_group, DefaultAltitudeGroup):
                     coordinate_values_for_the_result = []
@@ -214,7 +213,7 @@ class OneFoldFit(object):
                 print(self.massif_name, " has only implausible models")
 
         try:
-            sorted_estimators = sorted([estimator for estimator in estimators], key=lambda e: e.aic())
+            sorted_estimators = sorted([estimator for estimator in estimators], key=lambda e: e.aic)
         except AssertionError as e:
             print('Error for')
             print(self.massif_name, self.altitude_group)
@@ -329,7 +328,7 @@ class OneFoldFit(object):
 
     @property
     def likelihood_ratio(self):
-        return self.stationary_estimator.deviance() - self.best_estimator.deviance()
+        return self.stationary_estimator.deviance - self.best_estimator.deviance
 
     @property
     def degree_freedom_chi2(self):
@@ -392,7 +391,7 @@ class OneFoldFit(object):
 
     @cached_property
     def best_residuals(self):
-        return self.best_estimator.sorted_empirical_standard_gumbel_quantiles(split=Split.all)
+        return self.best_estimator.sorted_empirical_standard_gumbel_quantiles()
 
     @cached_property
     def cached_results_from_bootstrap(self):
diff --git a/extreme_trend/trend_test/abstract_gev_trend_test.py b/extreme_trend/trend_test/abstract_gev_trend_test.py
index 8807a8a8633a770124d1ece89af4eb7a63a5d6ee..e8c147a57acede8f6dd8740b62873265ef1e5cc9 100644
--- a/extreme_trend/trend_test/abstract_gev_trend_test.py
+++ b/extreme_trend/trend_test/abstract_gev_trend_test.py
@@ -103,7 +103,7 @@ class AbstractGevTrendTest(object):
         aic = 2 * self.total_number_of_parameters_for_unconstrained_model + self.unconstrained_model_deviance
         assert np.equal(self.total_number_of_parameters_for_unconstrained_model, self.unconstrained_estimator.nb_params)
         npt.assert_almost_equal(self.unconstrained_estimator.result_from_model_fit.aic, aic, decimal=5)
-        npt.assert_almost_equal(self.unconstrained_estimator.aic(), aic, decimal=5)
+        npt.assert_almost_equal(self.unconstrained_estimator.aic, aic, decimal=5)
         return aic
 
     @property
diff --git a/extreme_trend/two_fold_fit/__init__.py b/extreme_trend/two_fold_fit/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/extreme_trend/two_fold_fit/two_fold_datasets_generator.py b/extreme_trend/two_fold_fit/two_fold_datasets_generator.py
deleted file mode 100644
index 7864f6f98ea6f7aba725724cee5e2f72583efbed..0000000000000000000000000000000000000000
--- a/extreme_trend/two_fold_fit/two_fold_datasets_generator.py
+++ /dev/null
@@ -1,40 +0,0 @@
-from typing import Tuple, Dict, List
-
-from cached_property import cached_property
-
-from extreme_data.meteo_france_data.scm_models_data.altitudes_studies import AltitudesStudies
-from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
-from spatio_temporal_dataset.slicer.split import invert_s_split
-
-
-class TwoFoldDatasetsGenerator(object):
-
-    def __init__(self, studies: AltitudesStudies, nb_samples, massif_names=None):
-        self.studies = studies
-        self.nb_samples = nb_samples
-        if massif_names is None:
-            self.massif_names = self.studies.study.all_massif_names()
-        else:
-            self.massif_names = massif_names
-
-    @cached_property
-    def massif_name_to_list_two_fold_datasets(self) -> Dict[str, List[Tuple[AbstractDataset, AbstractDataset]]]:
-        d = {}
-        for massif_name in self.massif_names:
-            l = []
-            for _ in range(self.nb_samples):
-                # Append to the list a new two fold dataset
-                l.append(self.two_fold_datasets(massif_name))
-            d[massif_name] = l
-        return d
-
-    def two_fold_datasets(self, massif_name: str) -> Tuple[AbstractDataset, AbstractDataset]:
-        # Create split for the 1st fold
-        s_split_temporal = self.studies.random_s_split_temporal(train_split_ratio=0.5)
-        dataset_fold_1 = self.studies.spatio_temporal_dataset(massif_name=massif_name,
-                                                              s_split_temporal=s_split_temporal)
-        # Invert the s_split for the 2nd fold
-        s_split_temporal_inverted = invert_s_split(s_split_temporal)
-        dataset_fold_2 = self.studies.spatio_temporal_dataset(massif_name=massif_name,
-                                                              s_split_temporal=s_split_temporal_inverted)
-        return dataset_fold_1, dataset_fold_2
diff --git a/extreme_trend/two_fold_fit/two_fold_detail_fit.py b/extreme_trend/two_fold_fit/two_fold_detail_fit.py
deleted file mode 100644
index 852213de774c70ad1bccf7474b3aeff08e3cf706..0000000000000000000000000000000000000000
--- a/extreme_trend/two_fold_fit/two_fold_detail_fit.py
+++ /dev/null
@@ -1,85 +0,0 @@
-from typing import List
-
-import numpy as np
-
-from extreme_fit.estimator.margin_estimator.abstract_margin_estimator import LinearMarginEstimator
-from extreme_fit.estimator.margin_estimator.utils import fitted_linear_margin_estimator_short
-from extreme_trend.two_fold_fit.utils import Grouping, get_key_with_min_value, Score
-from spatio_temporal_dataset.slicer.split import Split
-
-
-class TwoFoldMassifFit(object):
-
-    def __init__(self, model_classes, list_two_fold_datasets, **kargs):
-        self.model_classes = model_classes
-        self.sample_id_to_sample_fit = {
-            sample_id: TwoFoldSampleFit(model_classes, two_fold_datasets=two_fold_datasets, **kargs)
-            for sample_id, two_fold_datasets in enumerate(list_two_fold_datasets)
-        }
-
-    def best_model(self, score, group):
-        if group is Grouping.MEAN_RANKING:
-            return get_key_with_min_value(self.model_class_to_mean_ranking(score))
-        else:
-            raise NotImplementedError
-
-    def sample_id_to_ordered_model(self, score):
-        return {
-            sample_id: sample_fit.ordered_model_classes(score)
-            for sample_id, sample_fit in self.sample_id_to_sample_fit.items()
-        }
-
-    def model_class_to_scores(self):
-        pass
-
-    def model_class_to_rankings(self, score):
-        model_class_to_ranks = {model_class: [] for model_class in self.model_classes}
-        for ordered_model in self.sample_id_to_ordered_model(score=score).values():
-            for rank, model_class in enumerate(ordered_model):
-                model_class_to_ranks[model_class].append(rank)
-        return model_class_to_ranks
-
-    def model_class_to_mean_ranking(self, score):
-        return {
-            model_class: np.mean(ranks)
-            for model_class, ranks in self.model_class_to_rankings(score).items()
-        }
-
-
-class TwoFoldSampleFit(object):
-
-    def __init__(self, model_classes, **kargs):
-        self.model_classes = model_classes
-        self.model_class_to_model_fit = {
-            model_class: TwoFoldModelFit(model_class, **kargs) for model_class in self.model_classes
-        }
-
-    def ordered_model_classes(self, score):
-        # Always ordered from the lower score to the higher score.
-        key = lambda model_class: self.model_class_to_model_fit[model_class].score(score)
-        return sorted(self.model_classes, key=key)
-
-    def scores(self, score):
-        return [self.model_class_to_model_fit[model_class].score(score) for model_class in self.model_classes]
-
-
-class TwoFoldModelFit(object):
-
-    def __init__(self, model_class, two_fold_datasets, fit_method):
-        self.model_class = model_class
-        self.fit_method = fit_method
-        self.estimators = [fitted_linear_margin_estimator_short(model_class=self.model_class, dataset=dataset,
-                                                                fit_method=self.fit_method)
-                           for dataset in two_fold_datasets]  # type: List[LinearMarginEstimator]
-        self.estimator_fold_1 = self.estimators[0]
-        self.estimator_fold_2 = self.estimators[1]
-
-    def score(self, score):
-        if score == Score.NLLH_TEST:
-            return self.nllh_test_temporal
-        else:
-            raise NotImplementedError
-
-    @property
-    def nllh_test_temporal(self):
-        return sum([e.nllh(split=Split.test_temporal) for e in self.estimators])
diff --git a/extreme_trend/two_fold_fit/two_fold_fit.py b/extreme_trend/two_fold_fit/two_fold_fit.py
deleted file mode 100644
index dee938a22fd2885c7216930fb974dafc0df870a4..0000000000000000000000000000000000000000
--- a/extreme_trend/two_fold_fit/two_fold_fit.py
+++ /dev/null
@@ -1,38 +0,0 @@
-from typing import Dict, List
-
-from cached_property import cached_property
-
-from extreme_fit.model.margin_model.utils import \
-    MarginFitMethod
-from extreme_trend.two_fold_fit.two_fold_datasets_generator import TwoFoldDatasetsGenerator
-from extreme_trend.two_fold_fit.two_fold_detail_fit import TwoFoldMassifFit
-from extreme_trend.two_fold_fit.utils import Score, Grouping
-
-
-class TwoFoldFit(object):
-
-    def __init__(self, two_fold_datasets_generator: TwoFoldDatasetsGenerator,
-                 model_family_name_to_model_classes: Dict[str, List[type]],
-                 fit_method=MarginFitMethod.extremes_fevd_mle):
-        self.two_fold_datasets_generator = two_fold_datasets_generator
-        self.fit_method = fit_method
-        self.model_family_name_to_model_classes = model_family_name_to_model_classes
-
-        self.massif_name_to_massif_fit = {}
-        for massif_name, list_two_fold_datasets in self.two_fold_datasets_generator.massif_name_to_list_two_fold_datasets.items():
-            self.massif_name_to_massif_fit[massif_name] = TwoFoldMassifFit(model_classes=self.model_classes_to_fit,
-                                                                           list_two_fold_datasets=list_two_fold_datasets,
-                                                                           fit_method=self.fit_method)
-
-    @cached_property
-    def model_classes_to_fit(self):
-        return set().union(*[set(model_classes) for model_classes in self.model_family_name_to_model_classes.values()])
-
-    def model_family_name_to_best_model(self, score):
-        pass
-
-    def massif_name_to_best_model(self, score=Score.NLLH_TEST, group=Grouping.MEAN_RANKING):
-        return {
-            massif_name: massif_fit.best_model(score, group)
-            for massif_name, massif_fit in self.massif_name_to_massif_fit.items()
-        }
diff --git a/extreme_trend/two_fold_fit/utils.py b/extreme_trend/two_fold_fit/utils.py
deleted file mode 100644
index 45f31c7dc201008cf357ce9b4d02c7bec2cce7e0..0000000000000000000000000000000000000000
--- a/extreme_trend/two_fold_fit/utils.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import operator
-from enum import Enum
-
-
-class Score(Enum):
-    NLLH_TEST = 1
-    AIC_TRAIN = 2
-    BIC_TRAIN = 3
-    SPAN_30_RETURN_LEVEL_2019_AT_1000M = 4
-
-
-class Grouping(Enum):
-    MEAN_RANKING = 1
-
-
-def get_key_with_max_value(d):
-    return max(d.items(), key=operator.itemgetter(1))[0]
-
-
-def get_key_with_min_value(d):
-    return min(d.items(), key=operator.itemgetter(1))[0]
diff --git a/spatio_temporal_dataset/coordinates/abstract_coordinates.py b/spatio_temporal_dataset/coordinates/abstract_coordinates.py
index a195fc6a8059d8eb34994646f8c2adfb048e89f0..0c45fc847d05bfd2a3c25ed6c3ebbb56a3223c92 100644
--- a/spatio_temporal_dataset/coordinates/abstract_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/abstract_coordinates.py
@@ -9,11 +9,6 @@ from mpl_toolkits.mplot3d import Axes3D
 from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.abstract_transformation import \
     AbstractTransformation, IdentityTransformation
 from spatio_temporal_dataset.coordinates.utils import get_index_without_spatio_temporal_index_suffix
-from spatio_temporal_dataset.slicer.abstract_slicer import AbstractSlicer, df_sliced
-from spatio_temporal_dataset.slicer.spatial_slicer import SpatialSlicer
-from spatio_temporal_dataset.slicer.spatio_temporal_slicer import SpatioTemporalSlicer
-from spatio_temporal_dataset.slicer.split import s_split_from_df, ind_train_from_s_split, Split
-from spatio_temporal_dataset.slicer.temporal_slicer import TemporalSlicer
 
 
 class AbstractCoordinates(object):
@@ -22,7 +17,6 @@ class AbstractCoordinates(object):
     Index are coordinates index
     Columns are the value of each coordinates
 
-    So far, the train_split_ratio is the same between the spatial part of the data, and the temporal part
     """
     # Spatial columns
     COORDINATE_X = 'coord_x'
@@ -32,7 +26,6 @@ class AbstractCoordinates(object):
     SPATIAL_SPLIT = 'spatial_split'
     # Temporal columns
     COORDINATE_T = 'coord_t'
-    TEMPORAL_SPLIT = 'temporal_split'
     # Climate model columns
     COORDINATE_RCP = 'coord_rcp'
     COORDINATE_GCM = 'coord_gcm'
@@ -44,8 +37,7 @@ class AbstractCoordinates(object):
     ALL_COORDINATES_ACCEPTED_TYPES = ['int64', 'float64']
     COORDINATE_TYPE = 'float64'
 
-    def __init__(self, df: pd.DataFrame, slicer_class: type, s_split_spatial: pd.Series = None,
-                 s_split_temporal: pd.Series = None, transformation_class: type = None):
+    def __init__(self, df: pd.DataFrame, transformation_class: type = None):
         # Extract df_all_coordinates from df
         coordinate_columns = [c for c in df.columns if c in self.COORDINATES_NAMES]
         assert len(coordinate_columns) > 0
@@ -56,12 +48,7 @@ class AbstractCoordinates(object):
         ind = self.df_all_coordinates.columns.isin(self.COORDINATE_CLIMATE_MODEL_NAMES)
         self.df_coordinate_climate_model = self.df_all_coordinates.loc[:, ind].copy()
         self.df_all_coordinates = self.df_all_coordinates.loc[:, ~ind] # type: pd.DataFrame
-        self.df_all_coordinates = self.df_all_coordinates.astype(self.COORDINATE_TYPE)
-
-        # Slicing attributes
-        self.s_split_spatial = s_split_spatial  # type: pd.Series
-        self.s_split_temporal = s_split_temporal  # type: pd.Series
-        self.slicer = None  # type: Union[None, AbstractSlicer]
+        self.df_all_coordinates = self.df_all_coordinates.astype(self.COORDINATE_TYPE)  # type: pd.DataFrame
 
         # Transformation attribute
         if transformation_class is None:
@@ -75,62 +62,17 @@ class AbstractCoordinates(object):
         self.transformation = transformation_class(self.df_all_coordinates)  # type: AbstractTransformation
         assert isinstance(self.transformation, AbstractTransformation)
 
-        # Load the slicer
-        if slicer_class is TemporalSlicer:
-            self.slicer = TemporalSlicer(self.ind_train_temporal)
-        elif slicer_class is SpatialSlicer:
-            self.slicer = SpatialSlicer(self.ind_train_spatial)
-        elif slicer_class is SpatioTemporalSlicer:
-            self.slicer = SpatioTemporalSlicer(self.ind_train_spatial, self.ind_train_temporal)
-        else:
-            raise ValueError("Unknown slicer_class: {}".format(slicer_class))
-
     # ClassMethod constructor
 
     @classmethod
     def from_df(cls, df: pd.DataFrame):
-        # Extract the split if they are specified
-        s_split_spatial = df[cls.SPATIAL_SPLIT].copy() if cls.SPATIAL_SPLIT in df.columns else None
-        s_split_temporal = df[cls.TEMPORAL_SPLIT].copy() if cls.TEMPORAL_SPLIT in df.columns else None
-
-        slicer_class = cls.slicer_class_from_s_splits(s_split_spatial, s_split_temporal)
-
-        return cls(df=df, slicer_class=slicer_class, s_split_spatial=s_split_spatial, s_split_temporal=s_split_temporal)
-
-    @classmethod
-    def slicer_class_from_s_splits(cls, s_split_spatial, s_split_temporal):
-        # Infer the slicer class
-        if s_split_temporal is None and s_split_spatial is None:
-            raise ValueError('Both split are unspecified')
-        elif s_split_temporal is not None and s_split_spatial is None:
-            slicer_class = TemporalSlicer
-        elif s_split_temporal is None and s_split_spatial is not None:
-            slicer_class = SpatialSlicer
-        else:
-            slicer_class = SpatioTemporalSlicer
-        return slicer_class
+        return cls(df=df)
 
     @classmethod
-    def from_df_and_slicer(cls, df: pd.DataFrame, slicer_class: type, train_split_ratio: float = None,
-                           transformation_class: type = None):
+    def from_df_and_transformation_class(cls, df: pd.DataFrame, transformation_class: type = None):
         # All the index should be unique
         assert len(set(df.index)) == len(df), 'df indices are not unique'
-
-        # Create a spatial split
-        s_split_spatial = cls.spatial_s_split_from_df(df, train_split_ratio)
-        # Create a temporal split
-        s_split_temporal = cls.temporal_s_split_from_df(df, train_split_ratio)
-
-        return cls(df=df, slicer_class=slicer_class, s_split_spatial=s_split_spatial, s_split_temporal=s_split_temporal,
-                   transformation_class=transformation_class)
-
-    @classmethod
-    def spatial_s_split_from_df(cls, df, train_split_ratio):
-        return s_split_from_df(df, cls.COORDINATE_X, cls.SPATIAL_SPLIT, train_split_ratio, True)
-
-    @classmethod
-    def temporal_s_split_from_df(cls, df, train_split_ratio):
-        return s_split_from_df(df, cls.COORDINATE_T, cls.TEMPORAL_SPLIT, train_split_ratio, False)
+        return cls(df=df, transformation_class=transformation_class)
 
     @classmethod
     def from_csv(cls, csv_path: str = None):
@@ -147,14 +89,9 @@ class AbstractCoordinates(object):
     def index(self) -> pd.Index:
         return self.df_all_coordinates.index
 
-    @property
-    def df_merged(self) -> pd.DataFrame:
-        # Merged DataFrame of df_coord with s_split
-        return self.df_coordinates().join(self.df_split)
-
-    # Split
+    # Coordinates
 
-    def df_coordinates(self, split: Split = Split.all, transformed=True, add_climate_informations=False) -> pd.DataFrame:
+    def df_coordinates(self, transformed=True, add_climate_informations=False) -> pd.DataFrame:
         if transformed:
             df_transformed_coordinates = self.transformation.transform_df(self.df_all_coordinates)
         else:
@@ -162,32 +99,10 @@ class AbstractCoordinates(object):
         if add_climate_informations:
             df_transformed_coordinates = pd.concat([df_transformed_coordinates,
                                                     self.df_coordinate_climate_model], axis=1)
-        return df_sliced(df=df_transformed_coordinates, split=split, slicer=self.slicer)
-
-    def coordinates_values(self, split: Split = Split.all, transformed=True) -> np.ndarray:
-        return self.df_coordinates(split, transformed=transformed).values
-
-    def coordinates_index(self, split: Split = Split.all) -> pd.Index:
-        return self.df_coordinates(split).index
+        return df_transformed_coordinates
 
-    @property
-    def ind_train_spatial(self) -> pd.Series:
-        return ind_train_from_s_split(s_split=self.s_split_spatial)
-
-    @property
-    def ind_train_temporal(self) -> pd.Series:
-        return ind_train_from_s_split(s_split=self.s_split_temporal)
-
-    @property
-    def df_split(self) -> pd.DataFrame:
-        split_name_to_s_split = {
-            self.SPATIAL_SPLIT: self.s_split_spatial,
-            self.TEMPORAL_SPLIT: self.s_split_temporal,
-        }
-        # Delete None s_split from the dictionary
-        split_name_to_s_split = {k: v for k, v in split_name_to_s_split.items() if v is not None}
-        # Create df_split from dict
-        return pd.DataFrame.from_dict(split_name_to_s_split)
+    def coordinates_values(self, transformed=True) -> np.ndarray:
+        return self.df_coordinates(transformed=transformed).values
 
     @property
     def coordinates_names(self) -> List[str]:
@@ -219,18 +134,20 @@ class AbstractCoordinates(object):
     def has_spatial_coordinates(self) -> bool:
         return self.nb_spatial_coordinates > 0
 
-    def df_spatial_coordinates(self, split: Split = Split.all, transformed=True, drop_duplicates=True) -> pd.DataFrame:
+    def df_spatial_coordinates(self, transformed=True, drop_duplicates=True) -> pd.DataFrame:
         if self.nb_spatial_coordinates == 0:
             return pd.DataFrame()
         else:
-            df = self.df_coordinates(split, transformed).loc[:, self.spatial_coordinates_names]
+            df = self.df_coordinates(transformed).loc[:, self.spatial_coordinates_names]
             return df.drop_duplicates() if drop_duplicates else df
 
-    def nb_stations(self, split: Split = Split.all) -> int:
-        return len(self.df_spatial_coordinates(split))
+    @property
+    def nb_stations(self) -> int:
+        return len(self.df_spatial_coordinates())
 
-    def spatial_index(self, split: Split = Split.all) -> pd.Index:
-        df_spatial = self.df_spatial_coordinates(split)
+    @property
+    def spatial_index(self) -> pd.Index:
+        df_spatial = self.df_spatial_coordinates()
         if self.has_spatio_temporal_coordinates:
             # Remove the spatio temporal index suffix
             return get_index_without_spatio_temporal_index_suffix(df_spatial)
@@ -251,26 +168,25 @@ class AbstractCoordinates(object):
     def has_temporal_coordinates(self) -> bool:
         return self.nb_temporal_coordinates > 0
 
-    def df_temporal_coordinates(self, split: Split = Split.all, transformed=True,
-                                drop_duplicates=True) -> pd.DataFrame:
+    def df_temporal_coordinates(self, transformed=True, drop_duplicates=True) -> pd.DataFrame:
         if self.nb_temporal_coordinates == 0:
             return pd.DataFrame()
         else:
-            df = self.df_coordinates(split, transformed=transformed).loc[:, self.temporal_coordinates_names]
+            df = self.df_coordinates(transformed=transformed).loc[:, self.temporal_coordinates_names]
             if drop_duplicates:
                 return df.drop_duplicates()
             else:
                 return df
 
-    def df_temporal_coordinates_for_fit(self, split=Split.all, starting_point=None,
+    def df_temporal_coordinates_for_fit(self, starting_point=None,
                                         temporal_covariate_for_fit: Union[None, type] = None,
                                         drop_duplicates=True, climate_coordinates_with_effects=None) -> pd.DataFrame:
         # Load time covariate
         if starting_point is None:
-            df = self.df_temporal_coordinates(split=split, transformed=True, drop_duplicates=drop_duplicates)
+            df = self.df_temporal_coordinates(transformed=True, drop_duplicates=drop_duplicates)
         else:
             # Load the un transformed coordinates
-            df_temporal_coordinates = self.df_temporal_coordinates(split=split, transformed=False)
+            df_temporal_coordinates = self.df_temporal_coordinates(transformed=False)
             # If starting point is not None, the transformation has not yet been applied
             # thus we need to modify the coordinate with the starting point, and then to apply the transformation
             # Compute the indices to modify
@@ -288,13 +204,13 @@ class AbstractCoordinates(object):
 
         # Potentially transform the time covariate into another covariate
         if temporal_covariate_for_fit is not None:
-            df_input = pd.concat([df, self.df_climate_models(split)], axis=1)
+            df_input = pd.concat([df, self.df_coordinate_climate_model], axis=1)
             df.loc[:, self.COORDINATE_T] = df_input.apply(temporal_covariate_for_fit.get_temporal_covariate, axis=1)
         if climate_coordinates_with_effects is not None:
             assert all([c in AbstractCoordinates.COORDINATE_CLIMATE_MODEL_NAMES for c in climate_coordinates_with_effects])
             for climate_coordinate in climate_coordinates_with_effects:
                 assert climate_coordinate in AbstractCoordinates.COORDINATE_CLIMATE_MODEL_NAMES
-                s, unique_values, unique_values_without_nan = self.load_unique_values(climate_coordinate, split)
+                s, unique_values, unique_values_without_nan = self.load_unique_values(climate_coordinate)
                 has_observations = len(unique_values) == len(unique_values_without_nan) + 1
                 if has_observations:
                     for v in unique_values_without_nan:
@@ -308,18 +224,18 @@ class AbstractCoordinates(object):
 
         return df
 
-    def load_unique_values(self, climate_coordinate, split=Split.all):
-        s = self.df_climate_models(split)[climate_coordinate]
+    def load_unique_values(self, climate_coordinate):
+        s = self.df_coordinate_climate_model[climate_coordinate]
         for character in self.character_to_remove_from_climate_model_coordinate_name():
             s = s.str.replace(character, "")
         unique_values = s.unique()
         unique_values_without_nan = [v for v in unique_values if isinstance(v, str)]
         return s, unique_values, unique_values_without_nan
 
-    def load_ordered_columns_names(self, climate_coordinates_names_with_effects, split=Split.all):
+    def load_ordered_columns_names(self, climate_coordinates_names_with_effects):
         column_names = []
         for climate_coordinate in climate_coordinates_names_with_effects:
-            _, _, names = self.load_unique_values(climate_coordinate, split)
+            _, _, names = self.load_unique_values(climate_coordinate)
             column_names.extend(names)
         return column_names
 
@@ -339,8 +255,8 @@ class AbstractCoordinates(object):
             climate_coordinates[indice] = 1
         return climate_coordinates
 
-    def df_climate_models(self, split=Split.all):
-        return df_sliced(df=self.df_coordinate_climate_model, split=split, slicer=self.slicer)
+    def df_climate_models(self):
+        return self.df_coordinate_climate_model
 
     @classmethod
     def character_to_remove_from_climate_model_coordinate_name(cls):
@@ -355,11 +271,12 @@ class AbstractCoordinates(object):
     def temporal_coordinates(self):
         raise NotImplementedError
 
-    def nb_steps(self, split: Split = Split.all) -> int:
-        return len(self.df_temporal_coordinates(split))
+    @property
+    def nb_steps(self) -> int:
+        return len(self.df_temporal_coordinates())
 
-    def df_temporal_range(self, split: Split = Split.all) -> Tuple[int, int]:
-        df_temporal_coordinates = self.df_temporal_coordinates(split)
+    def df_temporal_range(self) -> Tuple[int, int]:
+        df_temporal_coordinates = self.df_temporal_coordinates()
         return int(df_temporal_coordinates.min()), int(df_temporal_coordinates.max()),
 
     @property
@@ -376,8 +293,8 @@ class AbstractCoordinates(object):
     def has_spatio_temporal_coordinates(self) -> bool:
         return self.has_spatial_coordinates and self.has_temporal_coordinates
 
-    def spatio_temporal_shape(self, split: Split.all) -> Tuple[int, int]:
-        return len(self.df_spatial_coordinates(split)), len(self.df_temporal_coordinates(split))
+    def spatio_temporal_shape(self) -> Tuple[int, int]:
+        return len(self.df_spatial_coordinates()), len(self.df_temporal_coordinates())
 
     def ind_of_df_all_coordinates(self, coordinate_name, value):
         return self.df_all_coordinates.loc[:, coordinate_name] == value
@@ -447,8 +364,5 @@ class AbstractCoordinates(object):
     def __rmul__(self, other):
         return self * other
 
-    def __eq__(self, other):
-        return self.df_merged.equals(other.df_merged)
-
     def __str__(self):
         return pd.concat([self.df_coordinates(), self.df_coordinate_climate_model], axis=1).__str__()
diff --git a/spatio_temporal_dataset/coordinates/spatial_coordinates/abstract_spatial_coordinates.py b/spatio_temporal_dataset/coordinates/spatial_coordinates/abstract_spatial_coordinates.py
index 21d12f35525a9d6ce0e5623f5a534c36e292c9ce..7bbd27d20f6e87866f5e841e99d2859293fc4f05 100644
--- a/spatio_temporal_dataset/coordinates/spatial_coordinates/abstract_spatial_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/spatial_coordinates/abstract_spatial_coordinates.py
@@ -1,24 +1,25 @@
+from abc import ABC
+
 import pandas as pd
 
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
-from spatio_temporal_dataset.slicer.spatial_slicer import SpatialSlicer
 
 
-class AbstractSpatialCoordinates(AbstractCoordinates):
+class AbstractSpatialCoordinates(AbstractCoordinates, ABC):
 
     @classmethod
-    def from_list_x_coordinates(cls, x_coordinates, train_split_ratio: float = None, transformation_class: type = None):
+    def from_list_x_coordinates(cls, x_coordinates, transformation_class: type = None):
         df = pd.DataFrame({cls.COORDINATE_X: x_coordinates})
-        return cls.from_df(df, train_split_ratio, transformation_class)
+        return cls.from_df(df, transformation_class)
 
     @classmethod
-    def from_df(cls, df: pd.DataFrame, train_split_ratio: float = None, transformation_class: type = None):
+    def from_df(cls, df: pd.DataFrame, transformation_class: type = None):
         assert cls.COORDINATE_X in df.columns
         assert cls.COORDINATE_T not in df.columns
-        return super().from_df_and_slicer(df, SpatialSlicer, train_split_ratio, transformation_class)
+        return super().from_df_and_transformation_class(df, transformation_class)
 
     @classmethod
-    def from_nb_points(cls, nb_points: int, train_split_ratio: float = None, **kwargs):
+    def from_nb_points(cls, nb_points: int, **kwargs):
         # Call the default class method from csv
         coordinates = cls.from_csv()  # type: AbstractCoordinates
         # Check that nb_points asked is not superior to the number of coordinates
@@ -26,5 +27,5 @@ class AbstractSpatialCoordinates(AbstractCoordinates):
         if nb_points > nb_coordinates:
             raise Exception('Nb coordinates in csv: {} < Nb points desired: {}'.format(nb_coordinates, nb_points))
         # Sample randomly nb_points coordinates
-        df_sample = pd.DataFrame.sample(coordinates.df_merged, n=nb_points)
-        return cls.from_df(df=df_sample, train_split_ratio=train_split_ratio, **kwargs)
+        df_sample = pd.DataFrame.sample(coordinates.df_coordinates(), n=nb_points)
+        return cls.from_df(df=df_sample, **kwargs)
diff --git a/spatio_temporal_dataset/coordinates/spatial_coordinates/coordinates_1D.py b/spatio_temporal_dataset/coordinates/spatial_coordinates/coordinates_1D.py
index e5366354b2240b979efbc936c21eed360cc2cb2e..76d70d994b29a6de885a40fa2371cb553a8ee6e8 100644
--- a/spatio_temporal_dataset/coordinates/spatial_coordinates/coordinates_1D.py
+++ b/spatio_temporal_dataset/coordinates/spatial_coordinates/coordinates_1D.py
@@ -13,19 +13,19 @@ class AbstractUniDimensionalSpatialCoordinates(AbstractSpatialCoordinates):
 class LinSpaceSpatialCoordinates(AbstractUniDimensionalSpatialCoordinates):
 
     @classmethod
-    def from_nb_points(cls, nb_points, train_split_ratio: float = None, start=-1.0, end=1.0, **kwargs):
+    def from_nb_points(cls, nb_points, start=-1.0, end=1.0, **kwargs):
         axis_coordinates = np.linspace(start, end, nb_points)
         df = pd.DataFrame.from_dict({cls.COORDINATE_X: axis_coordinates})
-        return cls.from_df(df, train_split_ratio, **kwargs)
+        return cls.from_df(df, **kwargs)
 
 
 class UniformSpatialCoordinates(AbstractUniDimensionalSpatialCoordinates):
 
     @classmethod
-    def from_nb_points(cls, nb_points, train_split_ratio: float = None, start=-1.0, end=1.0, **kwargs):
+    def from_nb_points(cls, nb_points, start=-1.0, end=1.0, **kwargs):
         # Sample uniformly inside the circle
         df = cls.df_spatial(nb_points, start, end)
-        return cls.from_df(df, train_split_ratio, **kwargs)
+        return cls.from_df(df, **kwargs)
 
     @classmethod
     def df_spatial(cls, nb_points, start=-1.0, end=1.0):
diff --git a/spatio_temporal_dataset/coordinates/spatial_coordinates/coordinates_2D.py b/spatio_temporal_dataset/coordinates/spatial_coordinates/coordinates_2D.py
index a6c16539542c779740e9d146614ee06a900dd62d..c59dcf0ec1c111b28367cb7afc87b7e46090e3c2 100644
--- a/spatio_temporal_dataset/coordinates/spatial_coordinates/coordinates_2D.py
+++ b/spatio_temporal_dataset/coordinates/spatial_coordinates/coordinates_2D.py
@@ -14,9 +14,9 @@ class AbstractBiDimensionalSpatialCoordinates(AbstractSpatialCoordinates):
 class LinSpaceSpatial2DCoordinates(AbstractBiDimensionalSpatialCoordinates):
 
     @classmethod
-    def from_nb_points(cls, nb_points, train_split_ratio: float = None, start=-1.0, end=1.0, **kwargs):
+    def from_nb_points(cls, nb_points, start=-1.0, end=1.0, **kwargs):
         df = cls.df_spatial(nb_points, start, end)
-        return cls.from_df(df, train_split_ratio, **kwargs)
+        return cls.from_df(df, **kwargs)
 
     @classmethod
     def df_spatial(cls, nb_points, start=-1.0, end=1.0):
diff --git a/spatio_temporal_dataset/coordinates/spatial_coordinates/generated_spatial_coordinates.py b/spatio_temporal_dataset/coordinates/spatial_coordinates/generated_spatial_coordinates.py
index a27bedba8c7d8f6ff3aae3e1435861170180dff2..b9fc200e89468f9bec6e2b578c30d8511700b7be 100644
--- a/spatio_temporal_dataset/coordinates/spatial_coordinates/generated_spatial_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/spatial_coordinates/generated_spatial_coordinates.py
@@ -21,10 +21,8 @@ class CircleSpatialCoordinates(AbstractSpatialCoordinates):
         return df
 
     @classmethod
-    def from_nb_points(cls, nb_points, train_split_ratio: float = None, max_radius=1.0, random=True,
-                       transformation_class=None):
-        return cls.from_df(df=cls.df_spatial(nb_points, max_radius, random),
-                           train_split_ratio=train_split_ratio, transformation_class=transformation_class)
+    def from_nb_points(cls, nb_points, max_radius=1.0, random=True, transformation_class=None):
+        return cls.from_df(df=cls.df_spatial(nb_points, max_radius, random), transformation_class=transformation_class)
 
     def visualization_2D(self):
         radius = 1.0
@@ -38,5 +36,5 @@ class CircleSpatialCoordinates(AbstractSpatialCoordinates):
 class CircleSpatialCoordinatesRadius2(CircleSpatialCoordinates):
 
     @classmethod
-    def from_nb_points(cls, nb_points, train_split_ratio: float = None, max_radius=1.0, random=True, **kwargs):
-        return 2 * super().from_nb_points(nb_points, train_split_ratio, max_radius, random, **kwargs)
+    def from_nb_points(cls, nb_points, max_radius=1.0, random=True, **kwargs):
+        return 2 * super().from_nb_points(nb_points, max_radius, random, **kwargs)
diff --git a/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/abstract_spatio_temporal_coordinates.py b/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/abstract_spatio_temporal_coordinates.py
index 2db7f9985d36144077ab52149925c4b7ee47418b..fe000b479f18de3227feefa0b4f83e6a66aa33a8 100644
--- a/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/abstract_spatio_temporal_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/abstract_spatio_temporal_coordinates.py
@@ -1,4 +1,3 @@
-import numpy as np
 import pandas as pd
 
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
@@ -9,18 +8,16 @@ from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_
 from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.multiple_transformation import \
     MultipleTransformation
 from spatio_temporal_dataset.coordinates.utils import get_index_with_spatio_temporal_index_suffix
-from spatio_temporal_dataset.slicer.spatio_temporal_slicer import SpatioTemporalSlicer
 
 
 class AbstractSpatioTemporalCoordinates(AbstractCoordinates):
 
-    def __init__(self, df: pd.DataFrame = None, slicer_class: type = SpatioTemporalSlicer,
-                 s_split_spatial: pd.Series = None, s_split_temporal: pd.Series = None,
+    def __init__(self, df: pd.DataFrame = None,
                  transformation_class: type = None,
                  spatial_coordinates: AbstractSpatialCoordinates = None,
                  temporal_coordinates: AbstractTemporalCoordinates = None):
         df = self.load_df_is_needed(df, spatial_coordinates, temporal_coordinates)
-        super().__init__(df, slicer_class, s_split_spatial, s_split_temporal, None)
+        super().__init__(df, None)
         # Spatial coordinates'
         if spatial_coordinates is None:
             self._spatial_coordinates = AbstractSpatialCoordinates.from_df(
@@ -58,15 +55,7 @@ class AbstractSpatioTemporalCoordinates(AbstractCoordinates):
     def from_spatial_coordinates_and_temporal_coordinates(cls, spatial_coordinates: AbstractSpatialCoordinates,
                                                           temporal_coordinates: AbstractTemporalCoordinates):
         df = cls.get_df_from_spatial_and_temporal_coordinates(spatial_coordinates, temporal_coordinates)
-        return cls(df=df, slicer_class=SpatioTemporalSlicer,
-                   spatial_coordinates=spatial_coordinates, temporal_coordinates=temporal_coordinates)
-
-    @classmethod
-    def get_random_s_split_temporal(cls, spatial_coordinates: AbstractSpatialCoordinates,
-                                    temporal_coordinates: AbstractTemporalCoordinates,
-                                    train_split_ratio):
-        df = cls.get_df_from_spatial_and_temporal_coordinates(spatial_coordinates, temporal_coordinates)
-        return cls.temporal_s_split_from_df(df, train_split_ratio)
+        return cls(df=df, spatial_coordinates=spatial_coordinates, temporal_coordinates=temporal_coordinates)
 
     @classmethod
     def get_df_from_df_spatial_and_coordinate_t_values(cls, coordinate_t_values, df_spatial):
@@ -89,33 +78,29 @@ class AbstractSpatioTemporalCoordinates(AbstractCoordinates):
         return df
 
     @classmethod
-    def from_df(cls, df: pd.DataFrame, train_split_ratio: float = None, transformation_class: type = None):
+    def from_df(cls, df: pd.DataFrame, transformation_class: type = None):
         assert cls.COORDINATE_T in df.columns
         assert cls.COORDINATE_X in df.columns
         # Assert that the time steps are in the good order with respect to the coordinates
         nb_points = len(set(df[cls.COORDINATE_X]))
         first_time_step_for_all_points = df.iloc[:nb_points][cls.COORDINATE_T]
         assert len(set(first_time_step_for_all_points)) == 1
-        return super().from_df_and_slicer(df, SpatioTemporalSlicer, train_split_ratio, transformation_class)
+        return super().from_df_and_transformation_class(df, transformation_class)
 
     @classmethod
-    def from_df_spatial_and_coordinate_t_values(cls, df_spatial, coordinate_t_values, train_split_ratio: float = None,
+    def from_df_spatial_and_coordinate_t_values(cls, df_spatial, coordinate_t_values,
                                                 transformation_class: type = None):
         df_time_steps = cls.get_df_from_df_spatial_and_coordinate_t_values(coordinate_t_values, df_spatial)
-        return cls.from_df(df=df_time_steps, train_split_ratio=train_split_ratio,
-                           transformation_class=transformation_class)
+        return cls.from_df(df=df_time_steps, transformation_class=transformation_class)
 
     @classmethod
-    def from_df_spatial_and_nb_steps(cls, df_spatial, nb_steps, train_split_ratio: float = None, start=0,
+    def from_df_spatial_and_nb_steps(cls, df_spatial, nb_steps, start=0,
                                      transformation_class: type = None):
         coordinate_t_values = [start + t for t in range(nb_steps)]
-        return cls.from_df_spatial_and_coordinate_t_values(df_spatial, coordinate_t_values, train_split_ratio,
-                                                           transformation_class)
+        return cls.from_df_spatial_and_coordinate_t_values(df_spatial, coordinate_t_values, transformation_class)
 
     @classmethod
-    def from_df_spatial_and_df_temporal(cls, df_spatial, df_temporal, train_split_ratio: float = None,
-                                        transformation_class: type = None):
+    def from_df_spatial_and_df_temporal(cls, df_spatial, df_temporal, transformation_class: type = None):
         nb_steps = len(df_temporal)
         coordinate_t_values = [df_temporal.iloc[t].values[0] for t in range(nb_steps)]
-        return cls.from_df_spatial_and_coordinate_t_values(df_spatial, coordinate_t_values, train_split_ratio,
-                                                           transformation_class)
+        return cls.from_df_spatial_and_coordinate_t_values(df_spatial, coordinate_t_values, transformation_class)
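With the slicer and s_split arguments gone, the spatio-temporal factories above only thread transformation_class through to from_df. A minimal sketch of the simplified call chain, assuming the repository is importable and a one-column df_spatial keyed by COORDINATE_X is enough for the from_df assertions:

```python
import pandas as pd

from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
from spatio_temporal_dataset.coordinates.spatio_temporal_coordinates.abstract_spatio_temporal_coordinates import \
    AbstractSpatioTemporalCoordinates

# Two stations identified by their X coordinate
df_spatial = pd.DataFrame({AbstractCoordinates.COORDINATE_X: [0.0, 1.0]})
# Three consecutive time steps starting at t=0; no train_split_ratio anymore
coordinates = AbstractSpatioTemporalCoordinates.from_df_spatial_and_nb_steps(df_spatial, nb_steps=3)
```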
diff --git a/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/generated_spatio_temporal_coordinates.py b/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/generated_spatio_temporal_coordinates.py
index 3c461629cb694c6b247652a022ac98ccb1117120..fe410332da38e21e299cf7ad189523e511245fea 100644
--- a/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/generated_spatio_temporal_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/generated_spatio_temporal_coordinates.py
@@ -10,13 +10,12 @@ class GeneratedSpatioTemporalCoordinates(AbstractSpatioTemporalCoordinates):
     SPATIAL_COORDINATES_CLASS = None
 
     @classmethod
-    def from_nb_points_and_nb_steps(cls, nb_points, nb_steps, train_split_ratio: float = None,
+    def from_nb_points_and_nb_steps(cls, nb_points, nb_steps,
                                     transformation_class: type = None):
         assert isinstance(nb_steps, int) and nb_steps >= 1
         assert hasattr(cls.spatial_coordinate_class(), 'df_spatial')
         df_spatial = cls.spatial_coordinate_class().df_spatial(nb_points=nb_points)
-        return cls.from_df_spatial_and_nb_steps(df_spatial, nb_steps, train_split_ratio,
-                                                transformation_class=transformation_class)
+        return cls.from_df_spatial_and_nb_steps(df_spatial, nb_steps, transformation_class=transformation_class)
 
     @classmethod
     def spatial_coordinate_class(cls):
diff --git a/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/spatio_temporal_coordinates_for_climate_models.py b/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/spatio_temporal_coordinates_for_climate_models.py
index 1f1a238edf77912572622cb40486abdf4a9444d5..2abb2d479e9a53dcf1c475fcf1ae7ea3f44518a9 100644
--- a/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/spatio_temporal_coordinates_for_climate_models.py
+++ b/spatio_temporal_dataset/coordinates/spatio_temporal_coordinates/spatio_temporal_coordinates_for_climate_models.py
@@ -6,13 +6,11 @@ from spatio_temporal_dataset.coordinates.spatio_temporal_coordinates.abstract_sp
     AbstractSpatioTemporalCoordinates
 from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_coordinates import \
     AbstractTemporalCoordinates
-from spatio_temporal_dataset.slicer.spatio_temporal_slicer import SpatioTemporalSlicer
 
 
 class SpatioTemporalCoordinatesForClimateModels(AbstractSpatioTemporalCoordinates):
 
-    def __init__(self, df: pd.DataFrame = None, slicer_class: type = SpatioTemporalSlicer,
-                 s_split_spatial: pd.Series = None, s_split_temporal: pd.Series = None,
+    def __init__(self, df: pd.DataFrame = None,
                  transformation_class: type = None, spatial_coordinates: AbstractSpatialCoordinates = None,
                  temporal_coordinates: AbstractTemporalCoordinates = None,
                  gcm_rcm_couple=None,
@@ -23,5 +21,4 @@ class SpatioTemporalCoordinatesForClimateModels(AbstractSpatioTemporalCoordinate
         df[self.COORDINATE_RCP] = scenario_str
         df[self.COORDINATE_GCM] = gcm
         df[self.COORDINATE_RCM] = rcm
-        super().__init__(df, slicer_class, s_split_spatial, s_split_temporal, transformation_class, spatial_coordinates,
-                         temporal_coordinates)
+        super().__init__(df, transformation_class, spatial_coordinates, temporal_coordinates)
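The climate-model coordinates keep their GCM/RCM/RCP metadata columns but lose the slicer arguments as well. A hedged construction sketch, assuming scenario_str remains a keyword parameter (as the body above suggests), that the spatial and temporal coordinate objects were built beforehand, and with an illustrative couple and scenario label:

```python
from spatio_temporal_dataset.coordinates.spatio_temporal_coordinates.spatio_temporal_coordinates_for_climate_models import \
    SpatioTemporalCoordinatesForClimateModels

# spatial_coordinates / temporal_coordinates: previously built coordinate objects
coordinates = SpatioTemporalCoordinatesForClimateModels(
    spatial_coordinates=spatial_coordinates,
    temporal_coordinates=temporal_coordinates,
    gcm_rcm_couple=('CNRM-CM5', 'ALADIN63'),  # illustrative GCM/RCM pair
    scenario_str='rcp85',                     # illustrative scenario label
)
```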
diff --git a/spatio_temporal_dataset/coordinates/temporal_coordinates/abstract_temporal_coordinates.py b/spatio_temporal_dataset/coordinates/temporal_coordinates/abstract_temporal_coordinates.py
index c4362377471ffcf30014a3005d45695cd7d62033..8d1731c9dc9c61912a7f20621360f3207bc11181 100644
--- a/spatio_temporal_dataset/coordinates/temporal_coordinates/abstract_temporal_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/temporal_coordinates/abstract_temporal_coordinates.py
@@ -1,7 +1,6 @@
 import pandas as pd
 import numpy as np
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
-from spatio_temporal_dataset.slicer.temporal_slicer import TemporalSlicer
 
 
 class AbstractTemporalCoordinates(AbstractCoordinates):
@@ -15,7 +14,7 @@ class AbstractTemporalCoordinates(AbstractCoordinates):
         return self.transformation.transform_array(np.ones([1])) - self.transformation.transform_array(np.zeros([1]))
 
     @classmethod
-    def from_df(cls, df: pd.DataFrame, train_split_ratio: float = None, transformation_class: type = None):
+    def from_df(cls, df: pd.DataFrame, transformation_class: type = None):
         assert cls.COORDINATE_T in df.columns
         assert not any([coordinate_name in df.columns for coordinate_name in cls.COORDINATE_SPATIAL_NAMES])
-        return super().from_df_and_slicer(df, TemporalSlicer, train_split_ratio, transformation_class)
+        return super().from_df_and_transformation_class(df, transformation_class)
diff --git a/spatio_temporal_dataset/coordinates/temporal_coordinates/generated_temporal_coordinates.py b/spatio_temporal_dataset/coordinates/temporal_coordinates/generated_temporal_coordinates.py
index dda74952f49e6ced2f8dfd13ec8082a5b8d7cae8..bac9f8fb46f1e446d05217a0a82b3d50b7b38a88 100644
--- a/spatio_temporal_dataset/coordinates/temporal_coordinates/generated_temporal_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/temporal_coordinates/generated_temporal_coordinates.py
@@ -8,10 +8,10 @@ class ConsecutiveTemporalCoordinates(AbstractTemporalCoordinates):
     pass
 
     @classmethod
-    def from_nb_temporal_steps(cls, nb_temporal_steps, train_split_ratio: float = None, start=0,
+    def from_nb_temporal_steps(cls, nb_temporal_steps, start=0,
                                transformation_class: type = None):
         df = cls.df_temporal(nb_temporal_steps, start)
-        return cls.from_df(df, train_split_ratio, transformation_class=transformation_class)
+        return cls.from_df(df, transformation_class=transformation_class)
 
     @classmethod
     def df_temporal(cls, nb_temporal_steps, start=0):
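The consecutive temporal coordinates follow the same pattern; a short usage sketch, assuming the repository is importable and with illustrative values:

```python
from spatio_temporal_dataset.coordinates.temporal_coordinates.generated_temporal_coordinates import \
    ConsecutiveTemporalCoordinates

# Ten consecutive time steps starting in 1959, without a train_split_ratio argument
temporal_coordinates = ConsecutiveTemporalCoordinates.from_nb_temporal_steps(nb_temporal_steps=10, start=1959)
```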
diff --git a/spatio_temporal_dataset/coordinates/transformed_coordinates/transformed_coordinates.py b/spatio_temporal_dataset/coordinates/transformed_coordinates/transformed_coordinates.py
index 01a6811c0fc1e5540709adc2d202f1ecf2e5ffe9..d1c9509cbb405891e7f9f04aef25e201ed499c06 100644
--- a/spatio_temporal_dataset/coordinates/transformed_coordinates/transformed_coordinates.py
+++ b/spatio_temporal_dataset/coordinates/transformed_coordinates/transformed_coordinates.py
@@ -11,7 +11,6 @@ class TransformedCoordinates(AbstractCoordinates):
         df_coordinates = coordinates.df_all_coordinates.copy()
         transformation = transformation_class(df_coordinates)  # type: AbstractTransformation
         df_coordinates_transformed = transformation.transform_df(df_coordinates)
-        return cls(df=df_coordinates_transformed, slicer_class=type(coordinates.slicer),
-                   s_split_spatial=coordinates.s_split_spatial, s_split_temporal=coordinates.s_split_temporal)
+        return cls(df=df_coordinates_transformed)
 
 
diff --git a/spatio_temporal_dataset/dataset/abstract_dataset.py b/spatio_temporal_dataset/dataset/abstract_dataset.py
index 8c319931bf1dad3da623a1365e32a80022eeac2b..89fad9460891b7c54ee365e99c6eff1d2a48bc19 100644
--- a/spatio_temporal_dataset/dataset/abstract_dataset.py
+++ b/spatio_temporal_dataset/dataset/abstract_dataset.py
@@ -9,8 +9,6 @@ import pandas as pd
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 from spatio_temporal_dataset.coordinates.spatio_temporal_coordinates.abstract_spatio_temporal_coordinates import \
     AbstractSpatioTemporalCoordinates
-from spatio_temporal_dataset.slicer.abstract_slicer import AbstractSlicer
-from spatio_temporal_dataset.slicer.split import Split
 from spatio_temporal_dataset.spatio_temporal_observations.abstract_spatio_temporal_observations import \
     AbstractSpatioTemporalObservations
 
@@ -37,7 +35,7 @@ class AbstractDataset(object):
         # Create new coordinates
         coordinate_class = type(coordinates)
         new_df = coordinates.df_all_coordinates.loc[ind].copy()
-        new_coordinates = coordinate_class(df=new_df, slicer_class=type(coordinates.slicer))
+        new_coordinates = coordinate_class(df=new_df)
         return cls(new_observations, new_coordinates)
 
     @classmethod
@@ -67,27 +65,28 @@ class AbstractDataset(object):
     @property
     def df_dataset(self) -> pd.DataFrame:
         # Merge dataframes with the maxima and with the coordinates
-        return self.observations.df_maxima_merged.join(self.coordinates.df_merged)
+        return self.observations.df_maxima_merged.join(self.coordinates.df_coordinates())
 
     # Observation wrapper
 
-    def maxima_gev(self, split: Split = Split.all) -> np.ndarray:
-        return self.observations.maxima_gev(split, self.slicer)
+    @property
+    def maxima_gev(self) -> np.ndarray:
+        return self.observations.maxima_gev
 
-    def maxima_frech(self, split: Split = Split.all) -> np.ndarray:
-        return self.observations.maxima_frech(split, self.slicer)
+    @property
+    def maxima_frech(self) -> np.ndarray:
+        return self.observations.maxima_frech
 
-    def set_maxima_frech(self, maxima_frech_values: np.ndarray, split: Split = Split.all):
-        self.observations.set_maxima_frech(maxima_frech_values, split, self.slicer)
+    def set_maxima_frech(self, maxima_frech_values: np.ndarray):
+        self.observations.set_maxima_frech(maxima_frech_values)
 
     # Observation wrapper for fit function
 
-    def transform_maxima_for_spatial_extreme_package(self, maxima_function, split) -> np.ndarray:
-        array = maxima_function(split)
+    def transform_maxima_for_spatial_extreme_package(self, array) -> np.ndarray:
         if self.coordinates.has_spatio_temporal_coordinates:
             nb_obs = self.observations.nb_obs
-            nb_stations = self.coordinates.nb_stations(split)
-            nb_steps = self.coordinates.nb_steps(split)
+            nb_stations = self.coordinates.nb_stations
+            nb_steps = self.coordinates.nb_steps
             # Permute array lines
             time_steps = np.array(range(nb_steps))
             c = [time_steps * nb_stations + i for i in range(nb_stations)]
@@ -98,25 +97,22 @@ class AbstractDataset(object):
             array = array.reshape(shape)
         return np.transpose(array)
 
-    def maxima_gev_for_spatial_extremes_package(self, split: Split = Split.all) -> np.ndarray:
-        return self.transform_maxima_for_spatial_extreme_package(self.maxima_gev, split)
+    @property
+    def maxima_gev_for_spatial_extremes_package(self) -> np.ndarray:
+        return self.transform_maxima_for_spatial_extreme_package(self.maxima_gev)
 
-    def maxima_frech_for_spatial_extremes_package(self, split: Split = Split.all) -> np.ndarray:
-        return self.transform_maxima_for_spatial_extreme_package(self.maxima_frech, split)
+    @property
+    def maxima_frech_for_spatial_extremes_package(self) -> np.ndarray:
+        return self.transform_maxima_for_spatial_extreme_package(self.maxima_frech)
 
     # Coordinates wrapper
 
-    def df_coordinates(self, split: Split = Split.all) -> pd.DataFrame:
-        return self.coordinates.df_coordinates(split=split)
-
-    def coordinates_values(self, split: Split = Split.all) -> np.ndarray:
-        return self.coordinates.coordinates_values(split=split)
-
-    # Slicer wrapper
-
     @property
-    def slicer(self) -> AbstractSlicer:
-        return self.coordinates.slicer
+    def df_coordinates(self) -> pd.DataFrame:
+        return self.coordinates.df_coordinates()
+
+    def coordinates_values(self) -> np.ndarray:
+        return self.coordinates.coordinates_values()
 
     # Special methods
 
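Once the split argument disappears, the maxima accessors and coordinate wrappers of AbstractDataset become plain properties. A hedged before/after usage sketch, assuming `dataset` is an AbstractDataset instance built elsewhere in the repository:

```python
# Before the refactor, every accessor was parameterised by a Split value:
#     maxima = dataset.maxima_gev(split=Split.train_temporal)
#
# After the refactor, the full arrays are exposed as properties and the
# coordinate wrappers take no arguments:
maxima = dataset.maxima_gev              # np.ndarray with all observations
df_coords = dataset.df_coordinates       # pd.DataFrame with all coordinates
values = dataset.coordinates_values()    # np.ndarray view of the coordinates
```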
diff --git a/spatio_temporal_dataset/slicer/__init__.py b/spatio_temporal_dataset/slicer/__init__.py
deleted file mode 100644
index e69de29bb2d1d6434b8b29ae775ad8c2e48c5391..0000000000000000000000000000000000000000
diff --git a/spatio_temporal_dataset/slicer/abstract_slicer.py b/spatio_temporal_dataset/slicer/abstract_slicer.py
deleted file mode 100644
index cb6d98a5fe4bf6ded551a4424237e6dbc89e228d..0000000000000000000000000000000000000000
--- a/spatio_temporal_dataset/slicer/abstract_slicer.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from typing import Union, List
-
-import pandas as pd
-
-from spatio_temporal_dataset.slicer.split import Split
-
-
-class AbstractSlicer(object):
-
-    def __init__(self, ind_train_spatial: Union[None, pd.Series], ind_train_temporal: Union[None, pd.Series]):
-        self.ind_train_spatial = ind_train_spatial  # type: Union[None, pd.Series]
-        self.ind_train_temporal = ind_train_temporal  # type: Union[None, pd.Series]
-
-    @property
-    def ind_test_spatial(self) -> pd.Series:
-        return ~self.ind_train_spatial
-
-    @property
-    def ind_test_temporal(self) -> pd.Series:
-        return ~self.ind_train_temporal
-
-    def loc_split(self, df: pd.DataFrame, split: Split) -> pd.DataFrame:
-        # split should belong to the list of split accepted by the slicer
-        assert isinstance(split, Split)
-
-        if split is Split.all:
-            return df
-
-        assert split in self.splits, "Split and slicer_type do not correspond:\nsplit:{}, slicer_type:{}".format(split, type(self))
-
-        # By default, some required splits are not defined
-        # instead of crashing, we return all the data for all the split
-        # This is the default behavior, when the required splits has been defined
-        if self.some_required_ind_are_not_defined:
-            return df
-        else:
-            return self.specialized_loc_split(df=df, split=split)
-
-    def summary(self, show=True):
-        msg = ''
-        for s, global_name in [(self.ind_train_spatial, "Spatial"), (self.ind_train_temporal, "Temporal")]:
-            msg += global_name + ': '
-            if s is None:
-                msg += 'Not handled by this slicer'
-            else:
-                for f, name in [(len, 'Total'), (sum, 'train')]:
-                    msg += "{}: {} ".format(name, f(s))
-                msg += ' / '
-        if show:
-            print(msg)
-        return msg
-
-    # Methods that need to be defined in the child class
-
-    def specialized_loc_split(self, df: pd.DataFrame, split: Split) -> pd.DataFrame:
-        raise NotImplementedError
-
-    @property
-    def some_required_ind_are_not_defined(self) -> bool:
-        raise NotImplementedError
-
-    @property
-    def train_split(self) -> Split:
-        raise NotImplementedError
-
-    @property
-    def test_split(self) -> Split:
-        raise NotImplementedError
-
-    @property
-    def splits(self) -> List[Split]:
-        raise NotImplementedError
-
-
-def df_sliced(df: pd.DataFrame, split: Split = Split.all, slicer: AbstractSlicer = None) -> pd.DataFrame:
-    if slicer is None:
-        assert split is Split.all
-        return df
-    else:
-        return slicer.loc_split(df, split)
diff --git a/spatio_temporal_dataset/slicer/spatial_slicer.py b/spatio_temporal_dataset/slicer/spatial_slicer.py
deleted file mode 100644
index 7e2ed54d1a9cd5e4188d18bf7a2da1cc033ed07b..0000000000000000000000000000000000000000
--- a/spatio_temporal_dataset/slicer/spatial_slicer.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from typing import List, Union
-
-import pandas as pd
-
-from spatio_temporal_dataset.slicer.abstract_slicer import AbstractSlicer
-from spatio_temporal_dataset.slicer.split import Split
-
-
-class SpatialSlicer(AbstractSlicer):
-    SPLITS = [Split.train_spatial, Split.test_spatial]
-
-    def __init__(self, ind_train_spatial: Union[None, pd.Series]):
-        super().__init__(ind_train_spatial, None)
-
-    @property
-    def splits(self) -> List[Split]:
-        return self.SPLITS
-
-    @property
-    def train_split(self) -> Split:
-        return Split.train_spatial
-
-    @property
-    def test_split(self) -> Split:
-        return Split.test_spatial
-
-    @property
-    def some_required_ind_are_not_defined(self) -> bool:
-        return self.ind_train_spatial is None
-
-    def specialized_loc_split(self, df: pd.DataFrame, split: Split) -> pd.DataFrame:
-        assert pd.Index.equals(df.index, self.ind_train_spatial.index)
-        if split is Split.train_spatial:
-            return df.loc[self.ind_train_spatial]
-        elif split is Split.test_spatial:
-            return df.loc[self.ind_test_spatial]
diff --git a/spatio_temporal_dataset/slicer/spatio_temporal_slicer.py b/spatio_temporal_dataset/slicer/spatio_temporal_slicer.py
deleted file mode 100644
index 9ed09061bb2ed2b91e0865a56720c827eb97d522..0000000000000000000000000000000000000000
--- a/spatio_temporal_dataset/slicer/spatio_temporal_slicer.py
+++ /dev/null
@@ -1,41 +0,0 @@
-from typing import List
-
-import pandas as pd
-
-from spatio_temporal_dataset.slicer.abstract_slicer import AbstractSlicer
-from spatio_temporal_dataset.slicer.split import Split
-
-
-class SpatioTemporalSlicer(AbstractSlicer):
-    SPLITS = [Split.train_spatiotemporal,
-              Split.test_spatiotemporal,
-              Split.test_spatiotemporal_spatial,
-              Split.test_spatiotemporal_temporal]
-
-    @property
-    def splits(self) -> List[Split]:
-        return self.SPLITS
-
-    @property
-    def train_split(self) -> Split:
-        return Split.train_spatiotemporal
-
-    @property
-    def test_split(self) -> Split:
-        return Split.test_spatiotemporal
-
-    @property
-    def some_required_ind_are_not_defined(self) -> bool:
-        return self.ind_train_spatial is None or self.ind_train_temporal is None
-
-    def specialized_loc_split(self, df: pd.DataFrame, split: Split) -> pd.DataFrame:
-        assert pd.Index.equals(df.index, self.ind_train_temporal.index)
-        assert pd.Index.equals(df.index, self.ind_train_spatial.index)
-        if split is Split.train_spatiotemporal:
-            return df.loc[self.ind_train_spatial & self.ind_train_temporal]
-        elif split is Split.test_spatiotemporal:
-            return df.loc[self.ind_test_spatial & self.ind_test_temporal]
-        elif split is Split.test_spatiotemporal_spatial:
-            return df.loc[self.ind_test_spatial & self.ind_train_temporal]
-        elif split is Split.test_spatiotemporal_temporal:
-            return df.loc[self.ind_train_spatial & self.ind_test_temporal]
diff --git a/spatio_temporal_dataset/slicer/split.py b/spatio_temporal_dataset/slicer/split.py
deleted file mode 100644
index a5168fad71d0991571bff1b92f4a8a12999b54b8..0000000000000000000000000000000000000000
--- a/spatio_temporal_dataset/slicer/split.py
+++ /dev/null
@@ -1,80 +0,0 @@
-from enum import Enum
-import numpy as np
-from typing import Union
-
-import pandas as pd
-
-
-class Split(Enum):
-    all = 0
-    # SpatioTemporal splits
-    train_spatiotemporal = 1
-    test_spatiotemporal = 2
-    test_spatiotemporal_spatial = 3
-    test_spatiotemporal_temporal = 4
-    # Spatial splits
-    train_spatial = 5
-    test_spatial = 6
-    # Temporal splits
-    train_temporal = 7
-    test_temporal = 8
-
-
-ALL_SPLITS_EXCEPT_ALL = [split for split in Split if split is not Split.all]
-
-SPLIT_NAME = 'split'
-TRAIN_SPLIT_STR = 'train_split'
-TEST_SPLIT_STR = 'test_split'
-
-
-def invert_s_split(s_split):
-    ind = ind_train_from_s_split(s_split)
-    s_split.loc[ind] = TEST_SPLIT_STR
-    s_split.loc[~ind] = TRAIN_SPLIT_STR
-    return s_split
-
-
-def ind_train_from_s_split(s_split):
-    if s_split is None:
-        return None
-    else:
-        return s_split.isin([TRAIN_SPLIT_STR])
-
-
-def small_s_split_from_ratio(index: pd.Index, train_split_ratio):
-    length = len(index)
-    assert 0 < train_split_ratio < 1
-    s = pd.Series(TEST_SPLIT_STR, index=index)
-    nb_points_train = int(length * train_split_ratio)
-    assert 0 < nb_points_train < length
-    random_state = np.random.mtrand._rand
-    train_ind = pd.Series.sample(s, n=nb_points_train, random_state=random_state).index
-    assert 0 < len(train_ind) < length, "number of training points:{} length:{}".format(len(train_ind), length)
-    s.loc[train_ind] = TRAIN_SPLIT_STR
-    return s
-
-
-def s_split_from_df(df: pd.DataFrame, column, split_column, train_split_ratio, spatial_split) -> Union[None, pd.Series]:
-    df = df.copy()  # type: pd.DataFrame
-    # Extract the index
-    if train_split_ratio is None:
-        return None
-    if column not in df:
-        return None
-    elif split_column in df:
-        raise Exception('A split has already been defined')
-    else:
-        s = df.drop_duplicates(subset=[column], keep='first')[column]
-        assert len(df) % len(s) == 0
-        multiplication_factor = len(df) // len(s)
-        small_s_split = small_s_split_from_ratio(s.index, train_split_ratio)
-        if spatial_split:
-            # concatenation for spatial_split
-            s_split = pd.concat([small_s_split for _ in range(multiplication_factor)], ignore_index=True).copy()
-        else:
-            # dilatation for the temporal split
-            s_split = pd.Series(None, index=df.index)
-            for i in range(len(s_split)):
-                s_split.iloc[i] = small_s_split.iloc[i // multiplication_factor]
-        s_split.index = df.index
-        return s_split
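The random train/test helper disappears with this file. For callers that still need a reproducible split, here is a standalone pandas sketch of the deleted small_s_split_from_ratio behaviour; random_split_series is a hypothetical name and the function is an illustration outside the package, not part of its API:

```python
import pandas as pd

TRAIN_SPLIT_STR = 'train_split'
TEST_SPLIT_STR = 'test_split'


def random_split_series(index: pd.Index, train_split_ratio: float, seed: int = 0) -> pd.Series:
    """Standalone re-implementation of the removed helper: mark a random
    fraction of the index as train, the rest as test."""
    assert 0 < train_split_ratio < 1
    s = pd.Series(TEST_SPLIT_STR, index=index)
    nb_points_train = int(len(index) * train_split_ratio)
    assert 0 < nb_points_train < len(index)
    # Draw the training subset reproducibly
    train_ind = s.sample(n=nb_points_train, random_state=seed).index
    s.loc[train_ind] = TRAIN_SPLIT_STR
    return s


# Example: 75 % of 8 time steps flagged as train
print(random_split_series(pd.Index(range(8)), train_split_ratio=0.75))
```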
diff --git a/spatio_temporal_dataset/slicer/temporal_slicer.py b/spatio_temporal_dataset/slicer/temporal_slicer.py
deleted file mode 100644
index 68d135a523d769ea0c4914b3cca03ec398ba1def..0000000000000000000000000000000000000000
--- a/spatio_temporal_dataset/slicer/temporal_slicer.py
+++ /dev/null
@@ -1,36 +0,0 @@
-from typing import List, Union
-
-import pandas as pd
-
-from spatio_temporal_dataset.slicer.abstract_slicer import AbstractSlicer
-from spatio_temporal_dataset.slicer.split import Split
-
-
-class TemporalSlicer(AbstractSlicer):
-    SPLITS = [Split.train_temporal, Split.test_temporal]
-
-    def __init__(self, ind_train_temporal: Union[None, pd.Series]):
-        super().__init__(None, ind_train_temporal)
-
-    @property
-    def splits(self) -> List[Split]:
-        return self.SPLITS
-
-    @property
-    def train_split(self) -> Split:
-        return Split.train_temporal
-
-    @property
-    def test_split(self) -> Split:
-        return Split.test_temporal
-
-    @property
-    def some_required_ind_are_not_defined(self) -> bool:
-        return self.ind_train_temporal is None
-
-    def specialized_loc_split(self, df: pd.DataFrame, split: Split) -> pd.DataFrame:
-        assert pd.Index.equals(df.index, self.ind_train_temporal.index)
-        if split is Split.train_temporal:
-            return df.loc[self.ind_train_temporal]
-        elif split is Split.test_temporal:
-            return df.loc[self.ind_test_temporal]
diff --git a/spatio_temporal_dataset/slicer/utils.py b/spatio_temporal_dataset/slicer/utils.py
deleted file mode 100644
index 20e34ce24c87c31168e30a78e4da385e33e93f71..0000000000000000000000000000000000000000
--- a/spatio_temporal_dataset/slicer/utils.py
+++ /dev/null
@@ -1,10 +0,0 @@
-from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
-from spatio_temporal_dataset.slicer.spatio_temporal_slicer import SpatioTemporalSlicer
-
-
-def get_slicer_class_from_s_splits(s_split_spatial, s_split_temporal):
-    if s_split_temporal is None and s_split_spatial is None:
-        return SpatioTemporalSlicer
-    else:
-        return AbstractCoordinates.slicer_class_from_s_splits(s_split_spatial=s_split_spatial,
-                                                          s_split_temporal=s_split_temporal)
diff --git a/spatio_temporal_dataset/spatio_temporal_observations/abstract_spatio_temporal_observations.py b/spatio_temporal_dataset/spatio_temporal_observations/abstract_spatio_temporal_observations.py
index 8c1890b5a2ddf1e7e1b13b3ce25be388a6364314..cf0db6deee3ce5834152c7a1c832093cdd84d9d5 100644
--- a/spatio_temporal_dataset/spatio_temporal_observations/abstract_spatio_temporal_observations.py
+++ b/spatio_temporal_dataset/spatio_temporal_observations/abstract_spatio_temporal_observations.py
@@ -1,12 +1,9 @@
-import os.path as op
-import pandas as pd
 import numpy as np
+import pandas as pd
 
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
 from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_coordinates import \
     AbstractTemporalCoordinates
-from spatio_temporal_dataset.slicer.abstract_slicer import df_sliced, AbstractSlicer
-from spatio_temporal_dataset.slicer.split import Split
 
 
 class AbstractSpatioTemporalObservations(object):
@@ -72,7 +69,7 @@ class AbstractSpatioTemporalObservations(object):
     def convert_to_spatio_temporal_index(self, coordinates: AbstractCoordinates):
         assert coordinates.has_spatio_temporal_coordinates
         assert len(coordinates.index) == len(self.index) * self.nb_obs
-        assert pd.Index.equals(self.index, coordinates.spatial_index())
+        assert pd.Index.equals(self.index, coordinates.spatial_index)
         self.df_maxima_frech = self.flatten_df(self.df_maxima_frech, coordinates.index)
         self.df_maxima_gev = self.flatten_df(self.df_maxima_gev, coordinates.index)
 
@@ -99,16 +96,16 @@ class AbstractSpatioTemporalObservations(object):
             df.index = new_index
             return df
 
-    def maxima_gev(self, split: Split = Split.all, slicer: AbstractSlicer = None) -> np.ndarray:
-        return df_sliced(self.df_maxima_gev, split, slicer).values
+    @property
+    def maxima_gev(self) -> np.ndarray:
+        return self.df_maxima_gev.values
 
-    def maxima_frech(self, split: Split = Split.all, slicer: AbstractSlicer = None) -> np.ndarray:
-        return df_sliced(self.df_maxima_frech, split, slicer).values
+    @property
+    def maxima_frech(self) -> np.ndarray:
+        return self.df_maxima_frech.values
 
-    def set_maxima_frech(self, maxima_frech_values: np.ndarray, split: Split = Split.all,
-                         slicer: AbstractSlicer = None):
-        df = df_sliced(self.df_maxima_frech, split, slicer)
-        df.loc[:] = maxima_frech_values
+    def set_maxima_frech(self, maxima_frech_values: np.ndarray):
+        self.df_maxima_frech.loc[:] = maxima_frech_values
 
     def __str__(self) -> str:
         return self._df_maxima.__str__()
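set_maxima_frech now writes the whole frame in place instead of a sliced view. A small standalone pandas check of that assignment pattern, independent of the package; the variable names are illustrative:

```python
import numpy as np
import pandas as pd

# Assigning through .loc[:] replaces the values while keeping index and columns,
# which is the in-place write used by set_maxima_frech above.
df_maxima_frech = pd.DataFrame(np.zeros((3, 2)), index=['a', 'b', 'c'])
new_values = np.arange(6, dtype=float).reshape(3, 2)
df_maxima_frech.loc[:] = new_values
assert (df_maxima_frech.values == new_values).all()
```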
diff --git a/spatio_temporal_dataset/spatio_temporal_observations/annual_maxima_observations.py b/spatio_temporal_dataset/spatio_temporal_observations/annual_maxima_observations.py
index abd109d1f96f74df3afff1e706a4b795af3fa5a3..61c35fb446548b36d9921b3541541eb4d10ca2ab 100644
--- a/spatio_temporal_dataset/spatio_temporal_observations/annual_maxima_observations.py
+++ b/spatio_temporal_dataset/spatio_temporal_observations/annual_maxima_observations.py
@@ -77,7 +77,7 @@ class FullAnnualMaxima(MaxStableAnnualMaxima):
                              coordinates: AbstractCoordinates, margin_model: AbstractMarginModel):
         max_stable_annual_maxima = super().from_sampling(nb_obs, max_stable_model, coordinates)
         #  Compute df_maxima_gev from df_maxima_frech
-        maxima_gev = margin_model.rmargin_from_maxima_frech(maxima_frech=max_stable_annual_maxima.maxima_frech(),
+        maxima_gev = margin_model.rmargin_from_maxima_frech(maxima_frech=max_stable_annual_maxima.maxima_frech,
                                                             coordinates_values=coordinates.coordinates_values())
         max_stable_annual_maxima.df_maxima_gev = pd.DataFrame(data=maxima_gev, index=coordinates.index)
         return max_stable_annual_maxima
@@ -90,12 +90,12 @@ class FullSpatioTemporalAnnualMaxima(MaxStableAnnualMaxima):
                              coordinates: AbstractSpatioTemporalCoordinates, margin_model: AbstractMarginModel):
         # Sample with the max stable spatially
         spatial_coordinate = coordinates.spatial_coordinates
-        nb_total_obs = nb_obs * coordinates.nb_steps()
+        nb_total_obs = nb_obs * coordinates.nb_steps
         max_stable_annual_maxima = super().from_sampling(nb_total_obs, max_stable_model, spatial_coordinate)
         # Convert observation to a spatio temporal index
         max_stable_annual_maxima.convert_to_spatio_temporal_index(coordinates)
         #  Compute df_maxima_gev from df_maxima_frech
-        maxima_gev = margin_model.rmargin_from_maxima_frech(maxima_frech=max_stable_annual_maxima.maxima_frech(),
+        maxima_gev = margin_model.rmargin_from_maxima_frech(maxima_frech=max_stable_annual_maxima.maxima_frech,
                                                             coordinates_values=coordinates.coordinates_values())
         max_stable_annual_maxima.df_maxima_gev = pd.DataFrame(data=maxima_gev, index=coordinates.index)
         return max_stable_annual_maxima
diff --git a/spatio_temporal_dataset/spatio_temporal_observations/daily_observations.py b/spatio_temporal_dataset/spatio_temporal_observations/daily_observations.py
index 88bca20bb652832c71f7f100bbff25c718b59250..204d8d5509ddaf5f404b095f94acb82f39a4b98e 100644
--- a/spatio_temporal_dataset/spatio_temporal_observations/daily_observations.py
+++ b/spatio_temporal_dataset/spatio_temporal_observations/daily_observations.py
@@ -13,9 +13,8 @@ class DailyObservations(AbstractSpatioTemporalObservations):
     def transform_to_standard_shape(self, coordinates: AbstractTemporalCoordinates):
         assert isinstance(coordinates, AbstractTemporalCoordinates)
         df_coordinates = pd.concat([coordinates.df_all_coordinates for _ in range(self.nb_obs)])
-        df_coordinates.index = pd.Index(range(self.nb_obs * coordinates.nb_steps()))
-        coordinates = AbstractTemporalCoordinates.from_df(df_coordinates, train_split_ratio=None,
-                                                          transformation_class=coordinates.transformation_class)
+        df_coordinates.index = pd.Index(range(self.nb_obs * coordinates.nb_steps))
+        coordinates = AbstractTemporalCoordinates.from_df(df_coordinates, transformation_class=coordinates.transformation_class)
         df = pd.DataFrame(pd.concat([self.df_maxima_gev[c] for c in self.columns]))
         df.index = coordinates.index
         observation = AbstractSpatioTemporalObservations(df_maxima_gev=df)
diff --git a/test/test_extreme_data/test_meteo_france_data/test_altitudes_studies.py b/test/test_extreme_data/test_meteo_france_data/test_altitudes_studies.py
index 0c34971cefba5eb9110d174d184899de9979a47b..e251378fec44b622e590fa0aa637f821ea1a2595 100644
--- a/test/test_extreme_data/test_meteo_france_data/test_altitudes_studies.py
+++ b/test/test_extreme_data/test_meteo_france_data/test_altitudes_studies.py
@@ -4,8 +4,6 @@ from extreme_data.meteo_france_data.adamont_data.adamont.adamont_safran import A
 from extreme_data.meteo_france_data.adamont_data.adamont_scenario import AdamontScenario
 from extreme_data.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall1Day
 from extreme_data.meteo_france_data.scm_models_data.altitudes_studies import AltitudesStudies
-from spatio_temporal_dataset.slicer.split import Split
-
 
 
 class TestAltitudesStudies(unittest.TestCase):
 
@@ -21,64 +19,6 @@ class TestVisualization(TestAltitudesStudies):
     def test_plot_maxima_time_series(self):
         self.studies.plot_maxima_time_series(massif_names=['Vercors'], show=False)
 
-
-class TestSpatioTemporalCoordinates(TestAltitudesStudies):
-
-    def test_temporal_split(self):
-        s_split_temporal = self.studies.random_s_split_temporal(train_split_ratio=0.75)
-        coordinates = self.studies.spatio_temporal_coordinates(s_split_temporal=s_split_temporal)
-        self.assertEqual(coordinates.coordinates_values(split=Split.train_temporal).shape, (6, 2))
-        self.assertEqual(coordinates.coordinates_values(split=Split.test_temporal).shape, (2, 2))
-
-    def test_spatial_split(self):
-        s_split_spatial = self.studies.random_s_split_spatial(train_split_ratio=0.5)
-        coordinates = self.studies.spatio_temporal_coordinates(s_split_spatial=s_split_spatial)
-        self.assertEqual(coordinates.coordinates_values(split=Split.train_spatial).shape, (4, 2))
-        self.assertEqual(coordinates.coordinates_values(split=Split.test_spatial).shape, (4, 2))
-
-    def test_spatio_temporal_split(self):
-        s_split_spatial = self.studies.random_s_split_spatial(train_split_ratio=0.5)
-        s_split_temporal = self.studies.random_s_split_temporal(train_split_ratio=0.75)
-        coordinates = self.studies.spatio_temporal_coordinates(s_split_spatial=s_split_spatial,
-                                                               s_split_temporal=s_split_temporal)
-        self.assertEqual(coordinates.coordinates_values(split=Split.train_spatiotemporal).shape, (3, 2))
-        self.assertEqual(coordinates.coordinates_values(split=Split.test_spatiotemporal_spatial).shape, (3, 2))
-        self.assertEqual(coordinates.coordinates_values(split=Split.test_spatiotemporal_temporal).shape, (1, 2))
-        self.assertEqual(coordinates.coordinates_values(split=Split.test_spatiotemporal).shape, (1, 2))
-
-
-class TestSpatioTemporalDataset(TestAltitudesStudies):
-
-    def setUp(self) -> None:
-        super().setUp()
-        self.massif_name = "Vercors"
-
-    def test_temporal_split(self):
-        s_split_temporal = self.studies.random_s_split_temporal(train_split_ratio=0.75)
-        dataset = self.studies.spatio_temporal_dataset(massif_name=self.massif_name,
-                                                       s_split_temporal=s_split_temporal)
-        self.assertEqual(len(dataset.maxima_gev(split=Split.train_temporal)), 6)
-        self.assertEqual(len(dataset.maxima_gev(split=Split.test_temporal)), 2)
-
-    def test_spatial_split(self):
-        s_split_spatial = self.studies.random_s_split_spatial(train_split_ratio=0.5)
-        dataset = self.studies.spatio_temporal_dataset(massif_name=self.massif_name,
-                                                       s_split_spatial=s_split_spatial)
-        self.assertEqual(len(dataset.maxima_gev(split=Split.train_spatial)), 4)
-        self.assertEqual(len(dataset.maxima_gev(split=Split.test_spatial)), 4)
-
-    def test_spatio_temporal_split(self):
-        s_split_spatial = self.studies.random_s_split_spatial(train_split_ratio=0.5)
-        s_split_temporal = self.studies.random_s_split_temporal(train_split_ratio=0.75)
-        dataset = self.studies.spatio_temporal_dataset(massif_name=self.massif_name,
-                                                       s_split_spatial=s_split_spatial,
-                                                       s_split_temporal=s_split_temporal)
-        self.assertEqual(len(dataset.maxima_gev(split=Split.train_spatiotemporal)), 3)
-        self.assertEqual(len(dataset.maxima_gev(split=Split.test_spatiotemporal)), 1)
-        self.assertEqual(len(dataset.maxima_gev(split=Split.test_spatiotemporal_temporal)), 1)
-        self.assertEqual(len(dataset.maxima_gev(split=Split.test_spatiotemporal_spatial)), 3)
-
-
 class TestSpatioTemporalDatasetForClimateModels(unittest.TestCase):
 
     def setUp(self) -> None:
diff --git a/test/test_extreme_fit/test_estimator/test_gev_spatio_temporal_extremes_mle.py b/test/test_extreme_fit/test_estimator/test_gev_spatio_temporal_extremes_mle.py
deleted file mode 100644
index b9fe08c65cb7d1cbef6b58df6e3ea7258c22c706..0000000000000000000000000000000000000000
--- a/test/test_extreme_fit/test_estimator/test_gev_spatio_temporal_extremes_mle.py
+++ /dev/null
@@ -1,58 +0,0 @@
-import unittest
-
-from extreme_data.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall1Day
-from extreme_fit.model.margin_model.utils import \
-    MarginFitMethod
-from extreme_data.meteo_france_data.scm_models_data.altitudes_studies import AltitudesStudies
-from extreme_trend.two_fold_fit.two_fold_datasets_generator import \
-    TwoFoldDatasetsGenerator
-from extreme_trend.two_fold_fit.two_fold_fit import TwoFoldFit
-
-
-class TestGevTemporalQuadraticExtremesMle(unittest.TestCase):
-
-    def setUp(self) -> None:
-        self.altitudes = [900, 1200]
-        self.massif_name = 'Vercors'
-        self.study_class = SafranSnowfall1Day
-
-    def get_estimator_fitted(self, model_class):
-        studies = AltitudesStudies(self.study_class, self.altitudes, year_max=2019)
-        two_fold_datasets_generator = TwoFoldDatasetsGenerator(studies, nb_samples=1, massif_names=[self.massif_name])
-        model_family_name_to_model_class = {'Non stationary': [model_class]}
-        two_fold_fit = TwoFoldFit(two_fold_datasets_generator=two_fold_datasets_generator,
-                                  model_family_name_to_model_classes=model_family_name_to_model_class,
-                                  fit_method=MarginFitMethod.extremes_fevd_mle)
-        massif_fit = two_fold_fit.massif_name_to_massif_fit[self.massif_name]
-        sample_fit = massif_fit.sample_id_to_sample_fit[0]
-        model_fit = sample_fit.model_class_to_model_fit[model_class]  # type: TwoFoldModelFit
-        estimator = model_fit.estimator_fold_1
-        return estimator
-
-    def common_test(self, model_class):
-        estimator = self.get_estimator_fitted(model_class)
-        # Assert that indicators are correctly computed
-        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh(split=estimator.train_split))
-        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic(split=estimator.train_split))
-        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic(split=estimator.train_split))
-
-    # def test_assert_error(self):
-    #     for model_class in MODELS_THAT_SHOULD_RAISE_AN_ASSERTION_ERROR:
-    #         with self.assertRaises(AssertionError):
-    #             self.common_test(model_class)
-
-    # def test_location_spatio_temporal_models(self):
-    #     for model_class in VARIOUS_SPATIO_TEMPORAL_MODELS:
-    #         self.common_test(model_class)
-
-    # def test_altitudinal_gev_models(self):
-    #     for model_class in ALTITUDINAL_GEV_MODELS:
-    #         self.common_test(model_class)
-
-    # def test_altitudinal_gumbel_models(self):
-    #     for model_class in ALTITUDINAL_GUMBEL_MODELS[:]:
-    #         self.common_test(model_class)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/test/test_extreme_fit/test_estimator/test_spatio_temporal_estimator/test_gev_spatio_temporal_polynomial_extremes_mle.py b/test/test_extreme_fit/test_estimator/test_spatio_temporal_estimator/test_gev_spatio_temporal_polynomial_extremes_mle.py
index e0ba51f12a64e7ad19348c2dddc07ac80df9b5bb..1725c32d124c7499bb9fc442c630252ceb427822 100644
--- a/test/test_extreme_fit/test_estimator/test_spatio_temporal_estimator/test_gev_spatio_temporal_polynomial_extremes_mle.py
+++ b/test/test_extreme_fit/test_estimator/test_spatio_temporal_estimator/test_gev_spatio_temporal_polynomial_extremes_mle.py
@@ -60,9 +60,9 @@ class TestGevTemporalQuadraticExtremesMle(unittest.TestCase):
                                                          fit_method=self.fit_method)
 
         # Assert that indicators are correctly computed
-        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
-        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic())
-        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic())
+        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic)
+        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic)
         # Assert we can compute the return level
         covariate1_for_return_level = np.array([500, 0])
         covariate2_for_return_level = np.array([500, 50])
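The tests now compare the fitted result's nllh, aic and bic against estimator properties rather than method calls. For reference, a standalone sketch of the textbook relations these indicators are expected to satisfy; this states the standard definitions under generic names, not the repository's exact code, and the example numbers are illustrative:

```python
import numpy as np


def aic(nllh: float, nb_params: int) -> float:
    # Akaike information criterion from a negative log-likelihood
    return 2 * nb_params + 2 * nllh


def bic(nllh: float, nb_params: int, nb_obs: int) -> float:
    # Bayesian information criterion from a negative log-likelihood
    return np.log(nb_obs) * nb_params + 2 * nllh


# Example: a stationary GEV fit has 3 parameters (location, scale, shape)
print(aic(nllh=100.0, nb_params=3), bic(nllh=100.0, nb_params=3, nb_obs=50))
```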
diff --git a/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_extremes_mle.py b/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_extremes_mle.py
index d0682e18821c8ffbc0a7b962c6b11c0bd519d014..ebade9d06d248f048e1878fa4528fa78b78d1721 100644
--- a/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_extremes_mle.py
+++ b/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_extremes_mle.py
@@ -46,9 +46,9 @@ class TestGevTemporalExtremesMle(unittest.TestCase):
             mle_params_estimated = estimator.function_from_fit.get_params(np.array([year])).to_dict()
             for key in ref.keys():
                 self.assertAlmostEqual(ref[key], mle_params_estimated[key], places=3)
-            self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
-            self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic())
-            self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic())
+            self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+            self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic)
+            self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic)
 
     def test_gev_temporal_margin_fit_non_stationary_location(self):
         # Create estimator
@@ -59,9 +59,9 @@ class TestGevTemporalExtremesMle(unittest.TestCase):
         mle_params_estimated_year1 = estimator.function_from_fit.get_params(np.array([1])).to_dict()
         mle_params_estimated_year3 = estimator.function_from_fit.get_params(np.array([3])).to_dict()
         self.assertNotEqual(mle_params_estimated_year1, mle_params_estimated_year3)
-        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
-        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic())
-        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic())
+        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic)
+        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic)
 
     def test_gev_temporal_margin_fit_non_stationary_location_and_scale(self):
         # Create estimator
@@ -73,9 +73,9 @@ class TestGevTemporalExtremesMle(unittest.TestCase):
         mle_params_estimated_year1 = estimator.function_from_fit.get_params(np.array([1])).to_dict()
         mle_params_estimated_year3 = estimator.function_from_fit.get_params(np.array([3])).to_dict()
         self.assertNotEqual(mle_params_estimated_year1, mle_params_estimated_year3)
-        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
-        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic())
-        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic())
+        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic)
+        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic)
 
 
 if __name__ == '__main__':
diff --git a/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_polynomial_evgam.py b/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_polynomial_evgam.py
index 94379152c219a8c7b0cc3c9d37680e02997b74de..2dd680eb50bfe079282ecd708f8eadf4a2cc012d 100644
--- a/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_polynomial_evgam.py
+++ b/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_polynomial_evgam.py
@@ -54,9 +54,9 @@ class TestGevTemporalPolynomialEvgam(unittest.TestCase):
         diff2 = mle_params_estimated_year3[quadratic_param] - mle_params_estimated_year5[quadratic_param]
         self.assertNotAlmostEqual(diff1, diff2)
         # Assert that indicators are correctly computed
-        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
-        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic())
-        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic())
+        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic)
+        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic)
 
     def test_gev_temporal_margin_fit_non_stationary_quadratic_location(self):
         self.function_test_gev_temporal_margin_fit_non_stationary_quadratic(NonStationaryQuadraticLocationModel,
diff --git a/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_polynomial_extremes_mle.py b/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_polynomial_extremes_mle.py
index 95add8ef2e62f69e0c9b8d4f321a4e5f86c9f37a..82386487164a0517a1bfeab1078e3c78b7ce4c43 100644
--- a/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_polynomial_extremes_mle.py
+++ b/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_polynomial_extremes_mle.py
@@ -54,9 +54,9 @@ class TestGevTemporalQuadraticExtremesMle(unittest.TestCase):
         diff2 = mle_params_estimated_year3[quadratic_param] - mle_params_estimated_year5[quadratic_param]
         self.assertNotAlmostEqual(diff1, diff2)
         # Assert that indicators are correctly computed
-        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
-        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic())
-        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic())
+        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic)
+        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic)
 
     def test_gev_temporal_margin_fit_non_stationary_quadratic_location(self):
         self.function_test_gev_temporal_margin_fit_non_stationary_quadratic(NonStationaryQuadraticLocationModel,
diff --git a/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_spline.py b/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_spline.py
index 7ba5d3fc76b0af5e06fc70c5b9ac8d6b7c8daefe..149bc07efd1184f2ddbb85a8d2482da5b7cb118f 100644
--- a/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_spline.py
+++ b/test/test_extreme_fit/test_estimator/test_temporal_estimator/test_gev_temporal_spline.py
@@ -77,9 +77,9 @@ class TestGevTemporalSpline(unittest.TestCase):
                                        msg='for the {} parameter at year={}'.format(param_name, year),
                                        places=2)
         # Assert that indicators are correctly computed
-        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh())
-        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic())
-        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic())
+        self.assertAlmostEqual(estimator.result_from_model_fit.nllh, estimator.nllh)
+        self.assertAlmostEqual(estimator.result_from_model_fit.aic, estimator.aic)
+        self.assertAlmostEqual(estimator.result_from_model_fit.bic, estimator.bic)
 
     def test_gev_temporal_margin_fit_spline_two_linear_location(self):
         self.function_test_gev_temporal_margin_fit_non_stationary_spline(NonStationaryTwoLinearLocationModel,
diff --git a/test/test_extreme_trend/test_two_fold_fit.py b/test/test_extreme_trend/test_two_fold_fit.py
deleted file mode 100644
index 2963d2974330038e2c743f7cfb3f90b5d27c86f8..0000000000000000000000000000000000000000
--- a/test/test_extreme_trend/test_two_fold_fit.py
+++ /dev/null
@@ -1,80 +0,0 @@
-import unittest
-
-import numpy as np
-
-from extreme_data.meteo_france_data.scm_models_data.altitudes_studies import AltitudesStudies
-from extreme_data.meteo_france_data.scm_models_data.safran.safran import SafranSnowfall1Day
-from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import ConstantMarginModel
-from extreme_fit.model.margin_model.utils import MarginFitMethod
-from extreme_fit.model.utils import set_seed_for_test
-from extreme_trend.two_fold_fit.two_fold_datasets_generator import \
-    TwoFoldDatasetsGenerator
-from extreme_trend.two_fold_fit.two_fold_detail_fit import TwoFoldModelFit
-from extreme_trend.two_fold_fit.two_fold_fit import TwoFoldFit
-from extreme_trend.two_fold_fit.utils import Score
-from spatio_temporal_dataset.slicer.split import Split
-
-
-def load_two_fold_fit(fit_method, year_max):
-    altitudes = [900, 1200]
-    study_class = SafranSnowfall1Day
-    studies = AltitudesStudies(study_class, altitudes, year_max=year_max)
-    two_fold_datasets_generator = TwoFoldDatasetsGenerator(studies, nb_samples=1, massif_names=['Vercors'])
-    model_family_name_to_model_class = {'Stationary': [ConstantMarginModel]}
-    return TwoFoldFit(two_fold_datasets_generator=two_fold_datasets_generator,
-                      model_family_name_to_model_classes=model_family_name_to_model_class,
-                      fit_method=fit_method)
-
-
-class TestTwoFoldFit(unittest.TestCase):
-
-    def setUp(self) -> None:
-        super().setUp()
-        set_seed_for_test()
-
-    def test_determinism_dataset_generation(self):
-        two_fold_fit = load_two_fold_fit(fit_method=MarginFitMethod.spatial_extremes_mle, year_max=1963)
-        massif_fit = two_fold_fit.massif_name_to_massif_fit['Vercors']
-        sample_fit = massif_fit.sample_id_to_sample_fit[0]
-        model_fit = sample_fit.model_class_to_model_fit[ConstantMarginModel]  # type: TwoFoldModelFit
-        dataset_fold1 = model_fit.estimator_fold_1.dataset
-        index_train = list(dataset_fold1.coordinates.coordinates_index(split=Split.train_temporal))
-        self.assertEqual([2, 3, 8, 9], index_train)
-        self.assertEqual(110.52073192596436, np.sum(dataset_fold1.maxima_gev(split=Split.train_temporal)))
-
-    def test_determinism_fit_spatial_extreme(self):
-        two_fold_fit = load_two_fold_fit(fit_method=MarginFitMethod.spatial_extremes_mle, year_max=2019)
-        massif_fit = two_fold_fit.massif_name_to_massif_fit['Vercors']
-        model_fit = massif_fit.sample_id_to_sample_fit[0].model_class_to_model_fit[
-            ConstantMarginModel]  # type: TwoFoldModelFit
-        self.assertEqual(232.1804953450304, model_fit.score(score=Score.NLLH_TEST))
-
-class TestTwoFoldDatasetsGenerator(unittest.TestCase):
-
-    def setUp(self) -> None:
-        super().setUp()
-        altitudes = [900, 1200]
-        study_class = SafranSnowfall1Day
-        studies = AltitudesStudies(study_class, altitudes, year_min=1959, year_max=1963)
-        self.two_fold_estimation = TwoFoldDatasetsGenerator(studies, nb_samples=2)
-
-    def test_dataset_sizes(self):
-        dataset1, dataset2 = self.two_fold_estimation.two_fold_datasets('Vercors')
-        np.testing.assert_equal(dataset1.maxima_gev(Split.train_temporal), dataset2.maxima_gev(Split.test_temporal))
-        np.testing.assert_equal(dataset1.maxima_gev(Split.test_temporal), dataset2.maxima_gev(Split.train_temporal))
-
-    def test_crash(self):
-        dataset1, _ = self.two_fold_estimation.two_fold_datasets('Vercors')
-        with self.assertRaises(AssertionError):
-            dataset1.maxima_gev(split=Split.train_spatiotemporal)
-        with self.assertRaises(AssertionError):
-            dataset1.maxima_gev(split=Split.train_spatial)
-
-    def test_temporal_steps(self):
-        dataset1, _ = self.two_fold_estimation.two_fold_datasets('Vercors')
-        self.assertEqual(len(dataset1.coordinates.df_temporal_coordinates(split=Split.train_temporal)), 2)
-        self.assertEqual(len(dataset1.coordinates.df_temporal_coordinates(split=Split.test_temporal)), 3)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/test/test_spatio_temporal_dataset/test_coordinates.py b/test/test_spatio_temporal_dataset/test_coordinates.py
index ce5365378835b0ded31d011ac946ef03803c5e35..fb012a530d549ebc7fba5fcb7c1af13f5a087901 100644
--- a/test/test_spatio_temporal_dataset/test_coordinates.py
+++ b/test/test_spatio_temporal_dataset/test_coordinates.py
@@ -1,24 +1,25 @@
 import unittest
+from collections import OrderedDict
+
 import numpy as np
 import pandas as pd
-from collections import Counter, OrderedDict
 
 from extreme_fit.model.utils import set_seed_for_test
 from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
-from spatio_temporal_dataset.coordinates.spatio_temporal_coordinates.abstract_spatio_temporal_coordinates import \
-    AbstractSpatioTemporalCoordinates
-from spatio_temporal_dataset.coordinates.spatio_temporal_coordinates.generated_spatio_temporal_coordinates import \
-    UniformSpatioTemporalCoordinates, GeneratedSpatioTemporalCoordinates
-from spatio_temporal_dataset.coordinates.spatial_coordinates.coordinates_1D import UniformSpatialCoordinates, \
-    LinSpaceSpatialCoordinates
 from spatio_temporal_dataset.coordinates.spatial_coordinates.alps_station_2D_coordinates import \
     AlpsStation2DCoordinatesBetweenZeroAndOne
 from spatio_temporal_dataset.coordinates.spatial_coordinates.alps_station_3D_coordinates import \
     AlpsStation3DCoordinatesWithAnisotropy
+from spatio_temporal_dataset.coordinates.spatial_coordinates.coordinates_1D import UniformSpatialCoordinates, \
+    LinSpaceSpatialCoordinates
 from spatio_temporal_dataset.coordinates.spatial_coordinates.generated_spatial_coordinates import \
     CircleSpatialCoordinates
+from spatio_temporal_dataset.coordinates.spatio_temporal_coordinates.abstract_spatio_temporal_coordinates import \
+    AbstractSpatioTemporalCoordinates
+from spatio_temporal_dataset.coordinates.spatio_temporal_coordinates.generated_spatio_temporal_coordinates import \
+    GeneratedSpatioTemporalCoordinates
 from spatio_temporal_dataset.coordinates.temporal_coordinates.abstract_temporal_covariate_for_fit import \
-    AbstractTemporalCovariateForFit, TimeTemporalCovariate
+    TimeTemporalCovariate
 from spatio_temporal_dataset.coordinates.temporal_coordinates.generated_temporal_coordinates import \
     ConsecutiveTemporalCoordinates
 from spatio_temporal_dataset.coordinates.temporal_coordinates.temperature_covariate import \
@@ -28,9 +29,8 @@ from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.
 from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.uniform_normalization import \
     BetweenZeroAndOneNormalization
 from spatio_temporal_dataset.coordinates.utils import get_index_with_spatio_temporal_index_suffix
-from spatio_temporal_dataset.slicer.spatio_temporal_slicer import SpatioTemporalSlicer
-from test.test_utils import load_test_spatiotemporal_coordinates, load_test_spatial_coordinates, \
-    load_test_temporal_coordinates, load_test_1D_and_2D_spatial_coordinates
+from test.test_utils import load_test_spatiotemporal_coordinates, load_test_temporal_coordinates, \
+    load_test_1D_and_2D_spatial_coordinates
 
 
 class TestCoordinatesUtils(unittest.TestCase):
@@ -69,14 +69,6 @@ class SpatioTemporalCoordinates(unittest.TestCase):
     nb_points = 4
     nb_steps = 2
 
-    def test_temporal_circle(self):
-        self.coordinates = UniformSpatioTemporalCoordinates.from_nb_points_and_nb_steps(nb_points=self.nb_points,
-                                                                                        nb_steps=self.nb_steps,
-                                                                                        train_split_ratio=0.5)
-        c = Counter([len(self.coordinates.df_coordinates(split)) for split in SpatioTemporalSlicer.SPLITS])
-        good_count = c == Counter([2, 2, 2, 2]) or c == Counter([0, 0, 4, 4])
-        self.assertTrue(good_count)
-
     def test_unique_spatio_temporal_index_and_matching_spatial_index(self):
         spatial_coordinates = LinSpaceSpatialCoordinates.from_nb_points(self.nb_points)
         spatial_indexes = [[10, 11, 12, 13], ['a', 'b', 'c', 'd']]
@@ -88,8 +80,8 @@ class SpatioTemporalCoordinates(unittest.TestCase):
 
             # the uniqueness of each spatio-temporal index is not guaranteed by the current algorithm
             # it will work in classical cases, and raise an assert when uniqueness is needed (when using a slicer)
-            index1 = pd.Series(spatial_coordinates.spatial_index())
-            index2 = pd.Series(coordinates.spatial_index())
+            index1 = pd.Series(spatial_coordinates.spatial_index)
+            index2 = pd.Series(coordinates.spatial_index)
             ind = index1 != index2  # type: pd.Series
             self.assertEqual(sum(ind), 0, msg="spatial_coordinates:\n{} \n!= spatio_temporal_coordinates \n{}".
                              format(index1.loc[ind], index2.loc[ind]))
@@ -111,7 +103,7 @@ class SpatioTemporalCoordinates(unittest.TestCase):
         d[AbstractCoordinates.COORDINATE_Y] = [1]
         df = pd.DataFrame.from_dict(d)
         for df2 in [df, df.loc[:, ::-1]][-1:]:
-            coordinates = AbstractCoordinates(df=df2, slicer_class=SpatioTemporalSlicer)
+            coordinates = AbstractCoordinates(df=df2)
             self.assertEqual(list(coordinates.df_all_coordinates.columns),
                              [AbstractCoordinates.COORDINATE_X, AbstractCoordinates.COORDINATE_Y,
                               AbstractCoordinates.COORDINATE_Z])
@@ -121,7 +113,7 @@ class SpatioTemporalCoordinates(unittest.TestCase):
         d[AbstractCoordinates.COORDINATE_X] = [1]
         df = pd.DataFrame.from_dict(d)
         for df2 in [df, df.loc[:, ::-1]][-1:]:
-            coordinates = AbstractCoordinates(df=df2, slicer_class=SpatioTemporalSlicer)
+            coordinates = AbstractCoordinates(df=df2)
             self.assertEqual(list(coordinates.df_all_coordinates.columns),
                              [AbstractCoordinates.COORDINATE_X, AbstractCoordinates.COORDINATE_T])
 
@@ -137,8 +129,10 @@ class TestCoordinatesWithTransformedStartingPoint(unittest.TestCase):
     def test_starting_point_with_zero_one_normalization(self):
         # Load some 2D spatial coordinates
         coordinates = load_test_spatiotemporal_coordinates(nb_steps=self.nb_steps, nb_points=self.nb_points,
-                                                           transformation_class=BetweenZeroAndOneNormalization)[
-            1]  # type: AbstractSpatioTemporalCoordinates
+                                                           transformation_class=BetweenZeroAndOneNormalization)[1]  # type: AbstractSpatioTemporalCoordinates
+        self.assertEqual(None, coordinates.transformation_class)
+        self.assertEqual(BetweenZeroAndOneNormalization, coordinates.spatial_coordinates.transformation_class)
+        self.assertEqual(BetweenZeroAndOneNormalization, coordinates.temporal_coordinates.transformation_class)
         df = coordinates.df_temporal_coordinates_for_fit(starting_point=2)
         start_coordinates = df.iloc[2, 0]
         self.assertEqual(start_coordinates, 0.0)
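As a quick illustration of the call patterns the updated assertions above rely on, here is a minimal sketch assembled only from the hunks in this file (the literal data values are arbitrary and this is not a complete test case):

    import pandas as pd

    from spatio_temporal_dataset.coordinates.abstract_coordinates import AbstractCoordinates
    from spatio_temporal_dataset.coordinates.spatial_coordinates.coordinates_1D import LinSpaceSpatialCoordinates

    # Coordinates are now constructed from a DataFrame alone, without a slicer_class argument.
    df = pd.DataFrame.from_dict({AbstractCoordinates.COORDINATE_T: [0],
                                 AbstractCoordinates.COORDINATE_X: [1]})
    coordinates = AbstractCoordinates(df=df)

    # spatial_index is read as a property rather than called as a method.
    index = pd.Series(LinSpaceSpatialCoordinates.from_nb_points(nb_points=4).spatial_index)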
diff --git a/test/test_spatio_temporal_dataset/test_dataset.py b/test/test_spatio_temporal_dataset/test_dataset.py
index e480e29442d44611c9104f33399218c9af665178..92c8452c1cb139fdf6442a1ea2e303a7bf8c1f0e 100644
--- a/test/test_spatio_temporal_dataset/test_dataset.py
+++ b/test/test_spatio_temporal_dataset/test_dataset.py
@@ -17,7 +17,6 @@ from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.
     BetweenZeroAndOneNormalization
 from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
 from spatio_temporal_dataset.dataset.simulation_dataset import MaxStableDataset, MarginDataset
-from spatio_temporal_dataset.slicer.split import Split
 from spatio_temporal_dataset.spatio_temporal_observations.annual_maxima_observations import AnnualMaxima
 from test.test_utils import load_test_max_stable_models, load_test_3D_spatial_coordinates, \
     load_test_1D_and_2D_spatial_coordinates, load_test_spatiotemporal_coordinates
@@ -128,7 +127,7 @@ class TestSpatioTemporalDataset(unittest.TestCase):
         observation_at_time_0_v1 = self.dataset.observations.df_maxima_gev.loc[ind_time_0].values.flatten()
 
         # Load the observations corresponding to time 0
-        maxima_gev = self.dataset.maxima_gev_for_spatial_extremes_package()
+        maxima_gev = self.dataset.maxima_gev_for_spatial_extremes_package
         maxima_gev = np.transpose(maxima_gev)
         self.assertEqual(maxima_gev.shape, (3, 2))
         observation_at_time_0_v2 = maxima_gev[:, 0]
@@ -147,7 +146,7 @@ class TestSpatioTemporalDataset(unittest.TestCase):
         observation_at_station_0_v1 = self.dataset.observations.df_maxima_gev.loc[ind_station_0].values.flatten()
 
         # Load the observations corresponding to time 0
-        maxima_gev = self.dataset.maxima_gev_for_spatial_extremes_package()
+        maxima_gev = self.dataset.maxima_gev_for_spatial_extremes_package
         maxima_gev = np.transpose(maxima_gev)
         self.assertEqual(maxima_gev.shape, (3, 2))
         observation_at_time_0_v2 = maxima_gev[0, :]
@@ -165,7 +164,7 @@ class TestSpatioTemporalDataset(unittest.TestCase):
             value=-1)
         observation_at_station_0_v1 = self.dataset.observations.df_maxima_gev.loc[ind_station_0].values.flatten()
         # Load the observations corresponding to time 0
-        maxima_gev = self.dataset.maxima_gev_for_spatial_extremes_package()
+        maxima_gev = self.dataset.maxima_gev_for_spatial_extremes_package
         maxima_gev = np.transpose(maxima_gev)
         self.assertEqual(maxima_gev.shape, (3, 2 * 2))
         observation_at_station_0_v2 = maxima_gev[0, :]
diff --git a/test/test_spatio_temporal_dataset/test_slicer.py b/test/test_spatio_temporal_dataset/test_slicer.py
deleted file mode 100644
index 716ace5eb13f5f6569334f96126e0711d91163e0..0000000000000000000000000000000000000000
--- a/test/test_spatio_temporal_dataset/test_slicer.py
+++ /dev/null
@@ -1,134 +0,0 @@
-import numpy as np
-import pandas as pd
-from typing import List
-
-import unittest
-
-from extreme_fit.model.margin_model.linear_margin_model.linear_margin_model import ConstantMarginModel
-from extreme_fit.model.max_stable_model.max_stable_models import Smith
-from spatio_temporal_dataset.dataset.abstract_dataset import AbstractDataset
-from spatio_temporal_dataset.dataset.simulation_dataset import FullSimulatedDataset
-from spatio_temporal_dataset.slicer.split import ALL_SPLITS_EXCEPT_ALL, Split, small_s_split_from_ratio, invert_s_split
-from test.test_utils import load_test_1D_and_2D_spatial_coordinates, load_test_spatiotemporal_coordinates, \
-    load_test_temporal_coordinates
-
-
-class TestSplitFunctions(unittest.TestCase):
-
-    def test_inversion(self):
-        index = pd.Index([0, 1])
-        s_split = small_s_split_from_ratio(index=index, train_split_ratio=0.5)
-        inverted_s_split = invert_s_split(s_split.copy())
-        np.testing.assert_equal(inverted_s_split.iloc[::-1].values, s_split.values)
-
-
-class TestSlicerForDataset(unittest.TestCase):
-
-    def __init__(self, methodName: str = ...) -> None:
-        super().__init__(methodName)
-        self.dataset = None
-
-    nb_points = 2
-    nb_steps = 2
-    nb_obs = 2
-
-    @property
-    def complete_shape(self):
-        pass
-
-    def load_datasets(self, train_split_ratio) -> List[AbstractDataset]:
-        pass
-
-    def get_shape(self, dataset, split):
-        return dataset.maxima_frech(split).shape
-
-    def check_shapes(self, train_split_ratio_to_observation_shape):
-        assert self.complete_shape is not None
-        for train_split_ratio, data_shape in train_split_ratio_to_observation_shape.items():
-            for dataset in self.load_datasets(train_split_ratio):
-                dataset.slicer.summary(show=False)
-                self.assertEqual(self.complete_shape, self.get_shape(dataset, Split.all))
-                for split in ALL_SPLITS_EXCEPT_ALL:
-                    if split in dataset.slicer.splits:
-                        self.assertEqual(data_shape, self.get_shape(dataset, split))
-                    else:
-                        with self.assertRaises(AssertionError):
-                            self.get_shape(dataset, split)
-
-
-class TestSlicerForSpatialDataset(TestSlicerForDataset):
-
-    @property
-    def complete_shape(self):
-        return self.nb_points, self.nb_obs
-
-    def load_datasets(self, train_split_ratio):
-        coordinates_list = load_test_1D_and_2D_spatial_coordinates(nb_points=self.nb_points,
-                                                                   train_split_ratio=train_split_ratio)
-        dataset_list = [FullSimulatedDataset.from_double_sampling(nb_obs=self.nb_obs,
-                                                                  margin_model=ConstantMarginModel(
-                                                                      coordinates=coordinates),
-                                                                  coordinates=coordinates, max_stable_model=Smith())
-                        for coordinates in coordinates_list]
-        return dataset_list
-
-    def test_spatial_slicer_for_spatial_dataset(self):
-        train_split_ratio_to_observation_shape = {
-            None: self.complete_shape,
-            0.5: (self.nb_points // 2, self.nb_obs),
-        }
-        self.check_shapes(train_split_ratio_to_observation_shape)
-
-
-class TestSlicerForTemporalDataset(TestSlicerForDataset):
-
-    @property
-    def complete_shape(self):
-        return self.nb_steps, self.nb_obs
-
-    def load_datasets(self, train_split_ratio):
-        coordinates_list = load_test_temporal_coordinates(nb_steps=self.nb_steps,
-                                                          train_split_ratio=train_split_ratio)
-        dataset_list = [FullSimulatedDataset.from_double_sampling(nb_obs=self.nb_obs,
-                                                                  margin_model=ConstantMarginModel(
-                                                                      coordinates=coordinates),
-                                                                  coordinates=coordinates, max_stable_model=Smith())
-                        for coordinates in coordinates_list]
-        return dataset_list
-
-    def test_temporal_slicer_for_temporal_dataset(self):
-        ind_tuple_to_observation_shape = {
-            None: self.complete_shape,
-            0.5: (self.nb_steps // 2, self.nb_obs),
-        }
-        self.check_shapes(ind_tuple_to_observation_shape)
-
-
-class TestSlicerForSpatioTemporalDataset(TestSlicerForDataset):
-
-    @property
-    def complete_shape(self):
-        return self.nb_points * self.nb_steps, self.nb_obs
-
-    def load_datasets(self, train_split_ratio):
-        coordinates_list = load_test_spatiotemporal_coordinates(nb_points=self.nb_points,
-                                                                nb_steps=self.nb_steps,
-                                                                train_split_ratio=train_split_ratio)
-        coordinates_list = [coordinates for coordinates in coordinates_list if coordinates.nb_coordinates <= 2]
-        dataset_list = [FullSimulatedDataset.from_double_sampling(nb_obs=self.nb_obs,
-                                                                  margin_model=ConstantMarginModel(
-                                                                      coordinates=coordinates),
-                                                                  coordinates=coordinates, max_stable_model=Smith())
-                        for coordinates in coordinates_list]
-        return dataset_list
-
-    def test_spatiotemporal_slicer_for_spatio_temporal_dataset(self):
-        ind_tuple_to_observation_shape = {
-            None: self.complete_shape,
-            0.5: (self.nb_steps * self.nb_points // 4, self.nb_obs),
-        }
-        self.check_shapes(ind_tuple_to_observation_shape)
-
-
-if __name__ == '__main__':
-    unittest.main()
diff --git a/test/test_spatio_temporal_dataset/test_spatio_temporal_observations.py b/test/test_spatio_temporal_dataset/test_spatio_temporal_observations.py
index 5b3e6f28572b4988924da469e844cd2d1b10d47a..33c095bc6cb7e06db457791b6b54b80b9a226d43 100644
--- a/test/test_spatio_temporal_dataset/test_spatio_temporal_observations.py
+++ b/test/test_spatio_temporal_dataset/test_spatio_temporal_observations.py
@@ -28,7 +28,7 @@ class TestSpatioTemporalObservations(unittest.TestCase):
         temporal_observation = AbstractSpatioTemporalObservations(df_maxima_frech=df)
         example = np.array([[3], [6]])
         temporal_observation.set_maxima_frech(maxima_frech_values=example)
-        maxima_frech = temporal_observation.maxima_frech()
+        maxima_frech = temporal_observation.maxima_frech
         self.assertTrue(np.equal(example, maxima_frech).all(), msg="{} {}".format(example, maxima_frech))
 
 
diff --git a/test/test_unitary_r_packages/test_spatial_extreme/test_rmaxstab/test_rmaxstab_with_margin.py b/test/test_unitary_r_packages/test_spatial_extreme/test_rmaxstab/test_rmaxstab_with_margin.py
index 8d80948d9df076a2584ee7952b37f804d5dada0d..33a642be8831636a128b80727dae1e6941b7eb4c 100644
--- a/test/test_unitary_r_packages/test_spatial_extreme/test_rmaxstab/test_rmaxstab_with_margin.py
+++ b/test/test_unitary_r_packages/test_spatial_extreme/test_rmaxstab/test_rmaxstab_with_margin.py
@@ -51,7 +51,7 @@ class TestRMaxStabWithMarginConstant(TestUnitaryAbstract):
     @property
     def python_output(self):
         dataset = self.python_code()
-        return np.sum(dataset.maxima_gev())
+        return np.sum(dataset.maxima_gev)
 
     def test_rmaxstab_with_constant_margin(self):
         self.compare()
@@ -96,7 +96,7 @@ class TestRMaxStabWithLinearMargin(TestUnitaryAbstract):
     @property
     def python_output(self):
         dataset = self.python_code()
-        return np.sum(dataset.maxima_gev())
+        return np.sum(dataset.maxima_gev)
 
     def test_rmaxstab_with_linear_margin(self):
         self.compare()
diff --git a/test/test_unitary_r_packages/test_spatial_extreme/test_rmaxstab/test_rmaxstab_without_margin.py b/test/test_unitary_r_packages/test_spatial_extreme/test_rmaxstab/test_rmaxstab_without_margin.py
index 92e9cee552f938a905fc5e686f52eb0fed99d945..251a0c82a398bcc3e5b4ffc07d0f923f967cabcb 100644
--- a/test/test_unitary_r_packages/test_spatial_extreme/test_rmaxstab/test_rmaxstab_without_margin.py
+++ b/test/test_unitary_r_packages/test_spatial_extreme/test_rmaxstab/test_rmaxstab_without_margin.py
@@ -39,7 +39,7 @@ class TestRMaxStab(TestUnitaryAbstract):
         coordinates, max_stable_model = self.python_code()
         m = MaxStableAnnualMaxima.from_sampling(nb_obs=40, max_stable_model=max_stable_model, coordinates=coordinates)
         # TODO: understand why the arrays are not in the same order
-        return np.sum(m.maxima_frech())
+        return np.sum(m.maxima_frech)
 
     def test_rmaxstab(self):
         self.compare()
diff --git a/test/test_utils.py b/test/test_utils.py
index 49e95c9b786c4ced02e7fc8b16fb5801b3f6ad47..060d31b7e2a48dc3140b4f1d80906fbac6bb2cd4 100644
--- a/test/test_utils.py
+++ b/test/test_utils.py
@@ -89,16 +89,15 @@ def load_test_max_stable_models(default_covariance_function=None):
     return max_stable_models
 
 
-def load_test_spatial_coordinates(nb_points, coordinate_types, train_split_ratio=None, transformation_class=None):
-    return [coordinate_class.from_nb_points(nb_points=nb_points, train_split_ratio=train_split_ratio,
+def load_test_spatial_coordinates(nb_points, coordinate_types, transformation_class=None):
+    return [coordinate_class.from_nb_points(nb_points=nb_points,
                                             transformation_class=transformation_class)
             for coordinate_class in coordinate_types]
 
 
-def load_test_1D_and_2D_spatial_coordinates(nb_points, train_split_ratio=None, transformation_class=None) -> List[
+def load_test_1D_and_2D_spatial_coordinates(nb_points, transformation_class=None) -> List[
     AbstractSpatialCoordinates]:
     return load_test_spatial_coordinates(nb_points, TEST_1D_AND_2D_SPATIAL_COORDINATES,
-                                         train_split_ratio=train_split_ratio,
                                          transformation_class=transformation_class)
 
 
@@ -107,17 +106,14 @@ def load_test_3D_spatial_coordinates(nb_points, transformation_class=None) -> Li
                                          transformation_class=transformation_class)
 
 
-def load_test_temporal_coordinates(nb_steps, train_split_ratio=None, transformation_class=None) -> List[AbstractTemporalCoordinates]:
+def load_test_temporal_coordinates(nb_steps, transformation_class=None) -> List[AbstractTemporalCoordinates]:
     return [coordinate_class.from_nb_temporal_steps(nb_temporal_steps=nb_steps,
-                                                    train_split_ratio=train_split_ratio,
                                                     transformation_class=transformation_class)
             for coordinate_class in TEST_TEMPORAL_COORDINATES]
 
 
-def load_test_spatiotemporal_coordinates(nb_points, nb_steps, train_split_ratio=None,
-                                         transformation_class: type = None):
+def load_test_spatiotemporal_coordinates(nb_points, nb_steps, transformation_class: type = None):
     return [coordinate_class.from_nb_points_and_nb_steps(nb_points=nb_points, nb_steps=nb_steps,
-                                                         train_split_ratio=train_split_ratio,
                                                          transformation_class=transformation_class)
             for coordinate_class in TEST_SPATIO_TEMPORAL_COORDINATES]
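
Finally, a minimal usage sketch of the simplified loaders above (the argument values are arbitrary and chosen only to show the keyword arguments that remain):

    from spatio_temporal_dataset.coordinates.transformed_coordinates.transformation.uniform_normalization import \
        BetweenZeroAndOneNormalization
    from test.test_utils import load_test_1D_and_2D_spatial_coordinates, load_test_temporal_coordinates, \
        load_test_spatiotemporal_coordinates

    # train_split_ratio is gone from every loader; only the transformation class remains configurable.
    spatial_list = load_test_1D_and_2D_spatial_coordinates(nb_points=2)
    temporal_list = load_test_temporal_coordinates(nb_steps=2)
    spatio_temporal_list = load_test_spatiotemporal_coordinates(nb_points=2, nb_steps=2,
                                                                transformation_class=BetweenZeroAndOneNormalization)

    # Maxima accessors on datasets and observations are likewise read as properties now,
    # e.g. dataset.maxima_gev and observations.maxima_frech instead of the former method calls.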