Commit 3745d411 authored by Le Roux Erwan

[refactor] remove trend analysis folder. create extreme_trend_test folder

parent bc1a043e
Showing with 30 additions and 37 deletions
@@ -12,7 +12,7 @@ from experiment.meteo_france_data.scm_models_data.safran.safran import SafranSno
SafranTemperature, SafranPrecipitation
from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \
StudyVisualizer
-from projects.exceeding_snow_loads.discussion_data_comparison_with_eurocode.crocus_study_comparison_with_eurocode import \
+from projects.exceeding_snow_loads.section_discussion.crocus_study_comparison_with_eurocode import \
CrocusDifferenceSnowLoad, \
CrocusSnowDensityAtMaxofSwe, CrocusDifferenceSnowLoadRescaledAndEurocodeToSeeSynchronization, \
CrocusSnowDepthDifference, CrocusSnowDepthAtMaxofSwe
......
@@ -28,13 +28,15 @@ class AbstractGevTrendTest(object):
constrained_model_class=StationaryTemporalModel,
quantile_level=EUROCODE_QUANTILE,
fit_method=TemporalMarginFitMethod.extremes_fevd_mle):
-super().__init__(years, maxima, starting_year)
+self.years = years
+self.maxima = maxima
+self.starting_year = starting_year
self.unconstrained_model_class = unconstrained_model_class
self.constrained_model_class = constrained_model_class
self.quantile_level = quantile_level
self.fit_method = fit_method
# Load observations, coordinates and datasets
-self.coordinates, self.dataset = load_temporal_coordinates_and_dataset(maxima, years)
+self.coordinates, self.dataset = load_temporal_coordinates_and_dataset(self.maxima, self.years)
# By default crashed boolean is False
self.crashed = False
try:
......
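The hunk above replaces the super().__init__(years, maxima, starting_year) delegation with plain attribute assignment, so the trend test no longer relies on the removed trend-analysis base class for storage; the dataset is then rebuilt from self.maxima and self.years. A minimal usage sketch, assuming the concrete GumbelVersusGumbel subclass (imported elsewhere in this commit) keeps the base (years, maxima, starting_year) signature and accepts a synthetic maxima series:

# Sketch only: the subclass choice and its call signature are assumptions,
# taken from the import lines visible in this diff rather than from its source.
import numpy as np

from extreme_trend_test.trend_test_one_parameter.gumbel_trend_test_one_parameter import GumbelVersusGumbel

years = list(range(1959, 2020))
maxima = np.random.gamma(shape=2.0, scale=1.0, size=len(years))  # synthetic annual maxima

trend_test = GumbelVersusGumbel(years=years, maxima=maxima, starting_year=None)
# The refactored constructor now stores the arguments directly on the instance
# before building the temporal coordinates and dataset.
print(trend_test.years is years, trend_test.maxima is maxima)  # True True: stored as passed
print(trend_test.crashed)  # stays False unless the model fit inside the try block raised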
from collections import OrderedDict, Counter
+from enum import Enum
from multiprocessing.pool import Pool
from typing import Dict, List, Tuple
@@ -15,9 +16,7 @@ from experiment.meteo_france_data.scm_models_data.abstract_extended_study import
from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy
from experiment.meteo_france_data.scm_models_data.visualization.study_visualizer import \
StudyVisualizer
-from projects.exceeding_snow_loads.check_mcmc_convergence_for_return_levels.gelman_convergence_test import \
-compute_gelman_convergence_value
-from projects.exceeding_snow_loads.paper_utils import ModelSubsetForUncertainty, NON_STATIONARY_TREND_TEST_PAPER
+from projects.exceeding_snow_loads.utils import NON_STATIONARY_TREND_TEST_PAPER
from extreme_trend_test.abstract_gev_trend_test import AbstractGevTrendTest
from extreme_trend_test.trend_test_one_parameter.gumbel_trend_test_one_parameter import \
GumbelLocationTrendTest, GevStationaryVersusGumbel, GumbelScaleTrendTest, GumbelVersusGumbel
@@ -34,6 +33,14 @@ from extreme_fit.model.result_from_model_fit.result_from_extremes.eurocode_retur
from root_utils import NB_CORES
+class ModelSubsetForUncertainty(Enum):
+stationary_gumbel = 0
+stationary_gumbel_and_gev = 1
+non_stationary_gumbel = 2
+non_stationary_gumbel_and_gev = 3
+stationary_gev = 4
class StudyVisualizerForNonStationaryTrends(StudyVisualizer):
def __init__(self, study: AbstractStudy, show=True, save_to_file=False, only_one_graph=False, only_first_row=False,
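The ModelSubsetForUncertainty enum added above now lives in the same module as StudyVisualizerForNonStationaryTrends (it was previously imported from projects.exceeding_snow_loads.paper_utils), so callers pull both names from one place, exactly as the last hunk of this commit does. A small sketch of the new import; the member picked below is only an example:

# Sketch: importing the relocated enum together with the visualizer.
from extreme_trend_test.visualizers.study_visualizer_for_non_stationary_trends import \
    StudyVisualizerForNonStationaryTrends, ModelSubsetForUncertainty

model_subset = ModelSubsetForUncertainty.non_stationary_gumbel_and_gev
print(model_subset.name, model_subset.value)  # non_stationary_gumbel_and_gev 3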
@@ -368,17 +375,6 @@ class StudyVisualizerForNonStationaryTrends(StudyVisualizer):
def model_name_to_uncertainty_method_to_ratio_above_eurocode(self):
assert self.uncertainty_massif_names == self.study.study_massif_names
-# Some checks with Gelman convergence diagnosis
-def massif_name_to_gelman_convergence_value(self, mcmc_iterations, model_class, nb_chains):
-arguments = [(self.massif_name_to_years_and_maxima_for_model_fitting[m], mcmc_iterations, model_class, nb_chains)
-for m in self.uncertainty_massif_names]
-if self.multiprocessing:
-with Pool(NB_CORES) as p:
-res = p.starmap(compute_gelman_convergence_value, arguments)
-else:
-res = [compute_gelman_convergence_value(*argument) for argument in arguments]
-return dict(zip(self.uncertainty_massif_names, res))
# Some values for the histogram
@@ -505,3 +501,4 @@ class StudyVisualizerForNonStationaryTrends(StudyVisualizer):
psnow_before, psnow_after = [np.count_nonzero(s) / len(s) for s in [maxima_before, maxima_after]]
return 100 * (psnow_after - psnow_before) / psnow_before
return {m: compute_relative_change_in_psnow(self.massif_name_to_years_and_maxima[m][1]) for m in self.massifs_names_with_year_without_snow}
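The lines above compute the relative change in psnow, the proportion of years whose annual maximum is non-zero, as a plain percentage. A self-contained numeric check of that formula; the helper below is redefined for illustration only, taking the before/after series as explicit arguments instead of splitting a single series per massif:

import numpy as np

def compute_relative_change_in_psnow(maxima_before, maxima_after):
    # Fraction of years with a non-zero annual maximum, before and after the split.
    psnow_before, psnow_after = [np.count_nonzero(s) / len(s) for s in [maxima_before, maxima_after]]
    return 100 * (psnow_after - psnow_before) / psnow_before

maxima_before = np.array([1.2, 0.0, 3.4, 2.1, 0.0, 1.1, 2.2, 0.9, 1.5, 2.8])  # 8 of 10 years with snow
maxima_after = np.array([0.0, 0.0, 1.4, 0.0, 2.1, 0.0, 1.9, 0.8, 1.0, 1.3])   # 6 of 10 years with snow
print(compute_relative_change_in_psnow(maxima_before, maxima_after))  # -25.0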
@@ -3,7 +3,7 @@ from cached_property import cached_property
from experiment.meteo_france_data.scm_models_data.visualization.create_shifted_cmap import get_shifted_map
from experiment.meteo_france_data.scm_models_data.abstract_study import AbstractStudy
-from projects.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import \
+from extreme_trend_test.visualizers.study_visualizer_for_non_stationary_trends import \
StudyVisualizerForNonStationaryTrends
......
@@ -2,10 +2,9 @@ from collections import OrderedDict
from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear_margin_model import \
TemporalMarginFitMethod
-from projects.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import \
+from extreme_trend_test.visualizers.study_visualizer_for_non_stationary_trends import \
StudyVisualizerForNonStationaryTrends
def load_altitude_to_visualizer(altitudes, massif_names, model_subsets_for_uncertainty, study_class,
uncertainty_methods,
study_visualizer_class=StudyVisualizerForNonStationaryTrends,
......
from multiprocessing.pool import Pool
import matplotlib as mpl
+from extreme_trend_test.visualizers.utils import load_altitude_to_visualizer
mpl.use('Agg')
mpl.rcParams['text.usetex'] = True
mpl.rcParams['text.latex.preamble'] = [r'\usepackage{amsmath}']
@@ -11,21 +14,13 @@ from experiment.meteo_france_data.scm_models_data.safran.safran import SafranPre
SafranSnowfall5Days, SafranSnowfall3Days, SafranSnowfall7Days, SafranRainfall1Day, SafranRainfall3Days, \
SafranRainfall5Days, SafranRainfall7Days
-from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal, CrocusSnowLoad3Days, \
+from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoad3Days, \
CrocusSnowLoad5Days, CrocusSnowLoad7Days, CrocusSnowLoad1Day
from extreme_fit.model.result_from_model_fit.result_from_extremes.confidence_interval_method import \
ConfidenceIntervalMethodFromExtremes
-from projects.contrasting_snow_loads.plot_contrasting_trend_curves import plot_contrasting_trend_curves
-from projects.exceeding_snow_loads.paper_main_utils import load_altitude_to_visualizer
from projects.exceeding_snow_loads.paper_utils import paper_study_classes, paper_altitudes
-from projects.exceeding_snow_loads.result_trends_and_return_levels.main_result_trends_and_return_levels import \
+from projects.exceeding_snow_loads.section_results.main_result_trends_and_return_levels import \
compute_minimized_aic
-from projects.exceeding_snow_loads.result_trends_and_return_levels.plot_selection_curves import plot_selection_curves
-from projects.exceeding_snow_loads.result_trends_and_return_levels.plot_trend_curves import plot_trend_curves, \
-plot_trend_map
-from projects.exceeding_snow_loads.result_trends_and_return_levels.plot_uncertainty_curves import plot_uncertainty_massifs
-from projects.exceeding_snow_loads.result_trends_and_return_levels.plot_uncertainty_histogram import \
-plot_uncertainty_histogram
from root_utils import NB_CORES
......
@@ -3,8 +3,8 @@ import matplotlib.pyplot as plt
from experiment.meteo_france_data.scm_models_data.abstract_extended_study import AbstractExtendedStudy
from experiment.meteo_france_data.scm_models_data.visualization.utils import create_adjusted_axes
-from projects.exceeding_snow_loads.paper_utils import dpi_paper1_figure
-from projects.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import \
+from projects.exceeding_snow_loads.utils import dpi_paper1_figure
+from extreme_trend_test.visualizers.study_visualizer_for_non_stationary_trends import \
StudyVisualizerForNonStationaryTrends
......
import pandas as pd
from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal
-from projects.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import StudyVisualizerForNonStationaryTrends
+from extreme_trend_test.visualizers.study_visualizer_for_non_stationary_trends import StudyVisualizerForNonStationaryTrends
def mix_dsitrbution_impact():
......
@@ -3,7 +3,7 @@ import matplotlib.pyplot as plt
from experiment.meteo_france_data.scm_models_data.crocus.crocus import CrocusSnowLoadTotal
from experiment.meteo_france_data.scm_models_data.visualization.main_study_visualizer import \
ALL_ALTITUDES_WITHOUT_NAN
-from projects.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import \
+from extreme_trend_test.visualizers.study_visualizer_for_non_stationary_trends import \
StudyVisualizerForNonStationaryTrends
......
@@ -12,7 +12,7 @@ from extreme_fit.model.margin_model.linear_margin_model.abstract_temporal_linear
from extreme_fit.model.result_from_model_fit.result_from_extremes.abstract_extract_eurocode_return_level import \
AbstractExtractEurocodeReturnLevel
from projects.exceeding_snow_loads.data.main_example_swe_total_plot import tuples_for_examples_paper1
-from projects.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import \
+from extreme_trend_test.visualizers.study_visualizer_for_non_stationary_trends import \
StudyVisualizerForNonStationaryTrends
from extreme_fit.distribution.gev.gev_params import GevParams
......
@@ -3,8 +3,8 @@ from typing import Dict
import pandas as pd
from experiment.eurocode_data.utils import EUROCODE_ALTITUDES
-from projects.exceeding_snow_loads.paper_utils import ModelSubsetForUncertainty
-from projects.exceeding_snow_loads.study_visualizer_for_non_stationary_trends import StudyVisualizerForNonStationaryTrends
+from extreme_trend_test.visualizers.study_visualizer_for_non_stationary_trends import \
+StudyVisualizerForNonStationaryTrends, ModelSubsetForUncertainty
def uncertainty_interval_size(altitude_to_visualizer: Dict[int, StudyVisualizerForNonStationaryTrends]):
......