Commit 153df545 authored by Thibault Hallouin

use xexpression in evalp signature

Part of merge request !3 "release v0.1.0"
Pipeline #42704 passed in 2 minutes and 15 seconds
Showing 337 additions and 345 deletions (+337 -345)
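In short, evalp() becomes a function template over xtensor expression types and its implementation moves from src/probabilist/evalp.cpp into the header; the updated tests below supply the template arguments explicitly (the defaulted q_thr and t_msk parameters leave D2 and B4 otherwise undeducible) and pass evaluated expressions. A minimal, hypothetical usage sketch with dummy data (not taken from the commit), assuming the updated header is on the include path:

#include <string>
#include <vector>

#include <xtensor/xtensor.hpp>
#include <xtensor/xarray.hpp>
#include <xtensor/xview.hpp>
#include <xtensor/xeval.hpp>

#include "evalhyd/evalp.hpp"

int main()
{
    // dummy streamflow observations, shape: (time [5])
    xt::xtensor<double, 1> observed = {4.7, 4.3, 5.5, 2.7, 4.1};
    // dummy ensemble predictions, shape: (members [2], time [5])
    xt::xtensor<double, 2> predicted = {{5.3, 4.2, 5.7, 2.3, 3.1},
                                        {4.3, 4.2, 4.7, 4.3, 3.3}};

    // explicit template arguments give the concrete expression types
    // for (q_obs/q_thr, q_prd, t_msk); views are materialised with xt::eval
    std::vector<xt::xarray<double>> metrics =
        evalhyd::evalp<xt::xtensor<double, 2>,
                       xt::xtensor<double, 4>,
                       xt::xtensor<bool, 4>>(
            // shape: (sites [1], time [t])
            xt::eval(xt::view(observed, xt::newaxis(), xt::all())),
            // shape: (sites [1], lead times [1], members [m], time [t])
            xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(),
                              xt::all(), xt::all())),
            {"QS", "CRPS"}
        );
    return 0;
}

Wrapping the views in xt::eval materialises them into concrete xtensor containers, so the arguments match the explicitly supplied expression types.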
CMakeLists.txt:

...
@@ -21,7 +21,6 @@ message(STATUS "Found xtensor: ${xtensor_INCLUDE_DIRS}/xtensor")
# define evalhyd library
add_library(
        evalhyd
-       src/probabilist/evalp.cpp
        src/probabilist/evaluator_brier.cpp
        src/probabilist/evaluator_elements.cpp
        src/probabilist/evaluator_quantiles.cpp
...
evalhyd/evalp.hpp:

...
@@ -4,9 +4,16 @@
#include <unordered_map>
#include <vector>
+#include <xtensor/xexpression.hpp>
#include <xtensor/xtensor.hpp>
#include <xtensor/xarray.hpp>
+#include "utils.hpp"
+#include "masks.hpp"
+#include "maths.hpp"
+#include "uncertainty.hpp"
+#include "probabilist/evaluator.hpp"
namespace evalhyd
{
...
@@ -115,17 +122,280 @@ namespace evalhyd
    /// evalhyd::evalp(obs, prd, {"CRPS"});
    ///
    /// \endrst
+   template <class D2, class D4, class B4>
    std::vector<xt::xarray<double>> evalp(
-           const xt::xtensor<double, 2>& q_obs,
-           const xt::xtensor<double, 4>& q_prd,
+           const xt::xexpression<D2>& q_obs,
+           const xt::xexpression<D4>& q_prd,
            const std::vector<std::string>& metrics,
-           const xt::xtensor<double, 2>& q_thr = {},
-           const xt::xtensor<bool, 4>& t_msk = {},
+           const xt::xexpression<D2>& q_thr = D2({}),
+           const xt::xexpression<B4>& t_msk = B4({}),
            const xt::xtensor<std::array<char, 32>, 2>& m_cdt = {},
            const std::unordered_map<std::string, int>& bootstrap =
                    {{"n_samples", -9}, {"len_sample", -9}, {"summary", 0}},
            const std::vector<std::string>& dts = {}
-   );
+   )
{
// retrieve real types of the expressions
const D2& q_obs_ = q_obs.derived_cast();
const D4& q_prd_ = q_prd.derived_cast();
const D2& q_thr_ = q_thr.derived_cast();
const B4& t_msk_ = t_msk.derived_cast();
// check that the metrics to be computed are valid
eh::utils::check_metrics(
metrics,
{"BS", "BSS", "BS_CRD", "BS_LBD", "QS", "CRPS"}
);
// check that optional parameters are given as arguments
eh::utils::evalp::check_optionals(metrics, q_thr_);
eh::utils::check_bootstrap(bootstrap);
// check that data dimensions are compatible
// > time
if (q_obs_.shape(1) != q_prd_.shape(3))
throw std::runtime_error(
"observations and predictions feature different "
"temporal lengths"
);
if (t_msk_.size() > 0)
if (q_obs_.shape(1) != t_msk_.shape(3))
throw std::runtime_error(
"observations and masks feature different "
"temporal lengths"
);
if (!dts.empty())
if (q_obs_.shape(1) != dts.size())
throw std::runtime_error(
"observations and datetimes feature different "
"temporal lengths"
);
// > leadtimes
if (t_msk_.size() > 0)
if (q_prd_.shape(1) != t_msk_.shape(1))
throw std::runtime_error(
"predictions and temporal masks feature different "
"numbers of lead times"
);
// > sites
if (q_obs_.shape(0) != q_prd_.shape(0))
throw std::runtime_error(
"observations and predictions feature different "
"numbers of sites"
);
if (t_msk_.size() > 0)
if (q_obs_.shape(0) != t_msk_.shape(0))
throw std::runtime_error(
"observations and temporal masks feature different "
"numbers of sites"
);
if (m_cdt.size() > 0)
if (q_obs_.shape(0) != m_cdt.shape(0))
throw std::runtime_error(
"observations and masking conditions feature different "
"numbers of sites"
);
// retrieve dimensions
std::size_t n_sit = q_prd_.shape(0);
std::size_t n_ltm = q_prd_.shape(1);
std::size_t n_mbr = q_prd_.shape(2);
std::size_t n_tim = q_prd_.shape(3);
std::size_t n_thr = q_thr_.shape(1);
std::size_t n_msk = t_msk_.size() > 0 ? t_msk_.shape(2) :
(m_cdt.size() > 0 ? m_cdt.shape(1) : 1);
std::size_t n_exp = bootstrap.find("n_samples")->second == -9 ? 1:
bootstrap.find("n_samples")->second;
// register metrics number of dimensions
std::unordered_map<std::string, std::vector<std::size_t>> dim;
dim["BS"] = {n_sit, n_ltm, n_msk, n_exp, n_thr};
dim["BSS"] = {n_sit, n_ltm, n_msk, n_exp, n_thr};
dim["BS_CRD"] = {n_sit, n_ltm, n_msk, n_exp, n_thr, 3};
dim["BS_LBD"] = {n_sit, n_ltm, n_msk, n_exp, n_thr, 3};
dim["QS"] = {n_sit, n_ltm, n_msk, n_exp, n_mbr};
dim["CRPS"] = {n_sit, n_ltm, n_msk, n_exp};
// declare maps for memoisation purposes
std::unordered_map<std::string, std::vector<std::string>> elt;
std::unordered_map<std::string, std::vector<std::string>> dep;
// register potentially recurring computation elements across metrics
elt["bs"] = {"o_k", "y_k"};
elt["BSS"] = {"o_k", "bar_o"};
elt["BS_CRD"] = {"o_k", "bar_o", "y_k"};
elt["BS_LBD"] = {"o_k", "y_k"};
elt["qs"] = {"q_qnt"};
// register nested metrics (i.e. metric dependent on another metric)
dep["BS"] = {"bs"};
dep["BSS"] = {"bs"};
dep["QS"] = {"qs"};
dep["CRPS"] = {"qs", "crps"};
// determine required elt/dep to be pre-computed
std::vector<std::string> req_elt;
std::vector<std::string> req_dep;
eh::utils::find_requirements(metrics, elt, dep, req_elt, req_dep);
// generate masks from conditions if provided
auto gen_msk = [&]() {
xt::xtensor<bool, 4> c_msk = xt::zeros<bool>({n_sit, n_ltm, n_msk, n_tim});
if (m_cdt.size() > 0)
for (int s = 0; s < n_sit; s++)
for (int l = 0; l < n_ltm; l++)
for (int m = 0; m < n_msk; m++)
xt::view(c_msk, s, l, m) =
eh::masks::generate_mask_from_conditions(
xt::view(m_cdt, s, m),
xt::view(q_obs_, s),
xt::view(q_prd_, s, l)
);
return c_msk;
};
const xt::xtensor<bool, 4> c_msk = gen_msk();
// generate bootstrap experiment if requested
std::vector<xt::xkeep_slice<int>> b_exp;
auto n_samples = bootstrap.find("n_samples")->second;
auto len_sample = bootstrap.find("len_sample")->second;
if ((n_samples != -9) && (len_sample != -9))
{
if (dts.empty())
throw std::runtime_error(
"bootstrap requested but datetimes not provided"
);
b_exp = eh::uncertainty::bootstrap(
dts, n_samples, len_sample
);
}
else
{
// if no bootstrap requested, generate one sample
// containing all the time indices once
xt::xtensor<int, 1> all = xt::arange(n_tim);
b_exp.push_back(xt::keep(all));
}
// initialise data structure for outputs
std::vector<xt::xarray<double>> r;
for (const auto& metric : metrics)
r.emplace_back(xt::zeros<double>(dim[metric]));
auto summary = bootstrap.find("summary")->second;
// compute variables one site at a time to minimise memory imprint
for (int s = 0; s < n_sit; s++)
// compute variables one lead time at a time to minimise memory imprint
for (int l = 0; l < n_ltm; l++)
{
// instantiate probabilist evaluator
const auto q_obs_v = xt::view(q_obs_, s, xt::all());
const auto q_prd_v = xt::view(q_prd_, s, l, xt::all(), xt::all());
const auto q_thr_v = xt::view(q_thr_, s, xt::all());
const auto t_msk_v =
t_msk_.size() > 0 ?
xt::view(t_msk_, s, l, xt::all(), xt::all()) :
(m_cdt.size() > 0 ?
xt::view(c_msk, s, l, xt::all(), xt::all()) :
xt::view(t_msk_, s, l, xt::all(), xt::all()));
eh::probabilist::Evaluator evaluator(
q_obs_v, q_prd_v, q_thr_v, t_msk_v, b_exp
);
// pre-compute required elt
for (const auto& element : req_elt)
{
if ( element == "o_k" )
evaluator.calc_o_k();
else if ( element == "bar_o" )
evaluator.calc_bar_o();
else if ( element == "y_k" )
evaluator.calc_y_k();
else if ( element == "q_qnt" )
evaluator.calc_q_qnt();
}
// pre-compute required dep
for (const auto& dependency : req_dep)
{
if ( dependency == "bs" )
evaluator.calc_bs();
else if ( dependency == "qs" )
evaluator.calc_qs();
else if ( dependency == "crps" )
evaluator.calc_crps();
}
// retrieve or compute requested metrics
for (int m = 0; m < metrics.size(); m++)
{
const auto& metric = metrics[m];
if ( metric == "BS" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_BS();
// (sites, lead times, subsets, samples, thresholds)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.BS, summary);
}
else if ( metric == "BSS" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_BSS();
// (sites, lead times, subsets, samples, thresholds)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.BSS, summary);
}
else if ( metric == "BS_CRD" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_BS_CRD();
// (sites, lead times, subsets, samples, thresholds, components)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.BS_CRD, summary);
}
else if ( metric == "BS_LBD" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_BS_LBD();
// (sites, lead times, subsets, samples, thresholds, components)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.BS_LBD, summary);
}
else if ( metric == "QS" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_QS();
// (sites, lead times, subsets, samples, quantiles)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.QS, summary);
}
else if ( metric == "CRPS" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_CRPS();
// (sites, lead times, subsets, samples)
xt::view(r[m], s, l, xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.CRPS, summary);
}
}
}
return r;
};
}
#endif //EVALHYD_EVALP_HPP
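As background, here is a minimal, hypothetical sketch of the xt::xexpression pattern the new signature relies on: a function template accepts xt::xexpression<E> and recovers the concrete container with derived_cast(), which is what evalp() now does for q_obs, q_prd, q_thr and t_msk. The helper first_value() is illustrative only and not part of evalhyd.

#include <iostream>

#include <xtensor/xexpression.hpp>
#include <xtensor/xtensor.hpp>

// illustrative helper, not part of evalhyd
template <class E>
double first_value(const xt::xexpression<E>& expr)
{
    // retrieve the real type of the expression (same idiom as in evalp)
    const E& e = expr.derived_cast();
    return e(0, 0);
}

int main()
{
    xt::xtensor<double, 2> obs = {{4.7, 4.3, 5.5}};
    std::cout << first_value(obs) << std::endl;  // prints 4.7
    return 0;
}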
src/probabilist/evalp.cpp (deleted):

#include <utility>
#include <unordered_map>
#include <vector>
#include <array>
#include <stdexcept>
#include <xtensor/xtensor.hpp>
#include <xtensor/xarray.hpp>
#include <xtensor/xview.hpp>
#include "evalhyd/evalp.hpp"
#include "utils.hpp"
#include "masks.hpp"
#include "maths.hpp"
#include "uncertainty.hpp"
#include "probabilist/evaluator.hpp"
namespace eh = evalhyd;
namespace evalhyd
{
std::vector<xt::xarray<double>> evalp(
const xt::xtensor<double, 2>& q_obs,
const xt::xtensor<double, 4>& q_prd,
const std::vector<std::string>& metrics,
const xt::xtensor<double, 2>& q_thr,
const xt::xtensor<bool, 4>& t_msk,
const xt::xtensor<std::array<char, 32>, 2>& m_cdt,
const std::unordered_map<std::string, int>& bootstrap,
const std::vector<std::string>& dts
)
{
// check that the metrics to be computed are valid
eh::utils::check_metrics(
metrics,
{"BS", "BSS", "BS_CRD", "BS_LBD", "QS", "CRPS"}
);
// check that optional parameters are given as arguments
eh::utils::evalp::check_optionals(metrics, q_thr);
eh::utils::check_bootstrap(bootstrap);
// check that data dimensions are compatible
// > time
if (q_obs.shape(1) != q_prd.shape(3))
throw std::runtime_error(
"observations and predictions feature different "
"temporal lengths"
);
if (t_msk.size() > 0)
if (q_obs.shape(1) != t_msk.shape(3))
throw std::runtime_error(
"observations and masks feature different "
"temporal lengths"
);
if (!dts.empty())
if (q_obs.shape(1) != dts.size())
throw std::runtime_error(
"observations and datetimes feature different "
"temporal lengths"
);
// > leadtimes
if (t_msk.size() > 0)
if (q_prd.shape(1) != t_msk.shape(1))
throw std::runtime_error(
"predictions and temporal masks feature different "
"numbers of lead times"
);
// > sites
if (q_obs.shape(0) != q_prd.shape(0))
throw std::runtime_error(
"observations and predictions feature different "
"numbers of sites"
);
if (t_msk.size() > 0)
if (q_obs.shape(0) != t_msk.shape(0))
throw std::runtime_error(
"observations and temporal masks feature different "
"numbers of sites"
);
if (m_cdt.size() > 0)
if (q_obs.shape(0) != m_cdt.shape(0))
throw std::runtime_error(
"observations and masking conditions feature different "
"numbers of sites"
);
// retrieve dimensions
std::size_t n_sit = q_prd.shape(0);
std::size_t n_ltm = q_prd.shape(1);
std::size_t n_mbr = q_prd.shape(2);
std::size_t n_tim = q_prd.shape(3);
std::size_t n_thr = q_thr.shape(1);
std::size_t n_msk = t_msk.size() > 0 ? t_msk.shape(2) :
(m_cdt.size() > 0 ? m_cdt.shape(1) : 1);
std::size_t n_exp = bootstrap.find("n_samples")->second == -9 ? 1:
bootstrap.find("n_samples")->second;
// register metrics number of dimensions
std::unordered_map<std::string, std::vector<std::size_t>> dim;
dim["BS"] = {n_sit, n_ltm, n_msk, n_exp, n_thr};
dim["BSS"] = {n_sit, n_ltm, n_msk, n_exp, n_thr};
dim["BS_CRD"] = {n_sit, n_ltm, n_msk, n_exp, n_thr, 3};
dim["BS_LBD"] = {n_sit, n_ltm, n_msk, n_exp, n_thr, 3};
dim["QS"] = {n_sit, n_ltm, n_msk, n_exp, n_mbr};
dim["CRPS"] = {n_sit, n_ltm, n_msk, n_exp};
// declare maps for memoisation purposes
std::unordered_map<std::string, std::vector<std::string>> elt;
std::unordered_map<std::string, std::vector<std::string>> dep;
// register potentially recurring computation elements across metrics
elt["bs"] = {"o_k", "y_k"};
elt["BSS"] = {"o_k", "bar_o"};
elt["BS_CRD"] = {"o_k", "bar_o", "y_k"};
elt["BS_LBD"] = {"o_k", "y_k"};
elt["qs"] = {"q_qnt"};
// register nested metrics (i.e. metric dependent on another metric)
dep["BS"] = {"bs"};
dep["BSS"] = {"bs"};
dep["QS"] = {"qs"};
dep["CRPS"] = {"qs", "crps"};
// determine required elt/dep to be pre-computed
std::vector<std::string> req_elt;
std::vector<std::string> req_dep;
eh::utils::find_requirements(metrics, elt, dep, req_elt, req_dep);
// generate masks from conditions if provided
auto gen_msk = [&]() {
xt::xtensor<bool, 4> c_msk = xt::zeros<bool>({n_sit, n_ltm, n_msk, n_tim});
if (m_cdt.size() > 0)
for (int s = 0; s < n_sit; s++)
for (int l = 0; l < n_ltm; l++)
for (int m = 0; m < n_msk; m++)
xt::view(c_msk, s, l, m) =
eh::masks::generate_mask_from_conditions(
xt::view(m_cdt, s, m),
xt::view(q_obs, s),
xt::view(q_prd, s, l)
);
return c_msk;
};
const xt::xtensor<bool, 4> c_msk = gen_msk();
// generate bootstrap experiment if requested
std::vector<xt::xkeep_slice<int>> b_exp;
auto n_samples = bootstrap.find("n_samples")->second;
auto len_sample = bootstrap.find("len_sample")->second;
if ((n_samples != -9) && (len_sample != -9))
{
if (dts.empty())
throw std::runtime_error(
"bootstrap requested but datetimes not provided"
);
b_exp = eh::uncertainty::bootstrap(
dts, n_samples, len_sample
);
}
else
{
// if no bootstrap requested, generate one sample
// containing all the time indices once
xt::xtensor<int, 1> all = xt::arange(n_tim);
b_exp.push_back(xt::keep(all));
}
// initialise data structure for outputs
std::vector<xt::xarray<double>> r;
for (const auto& metric : metrics)
r.emplace_back(xt::zeros<double>(dim[metric]));
auto summary = bootstrap.find("summary")->second;
// compute variables one site at a time to minimise memory imprint
for (int s = 0; s < n_sit; s++)
// compute variables one lead time at a time to minimise memory imprint
for (int l = 0; l < n_ltm; l++)
{
// instantiate probabilist evaluator
const auto q_obs_v = xt::view(q_obs, s, xt::all());
const auto q_prd_v = xt::view(q_prd, s, l, xt::all(), xt::all());
const auto q_thr_v = xt::view(q_thr, s, xt::all());
const auto t_msk_v =
t_msk.size() > 0 ?
xt::view(t_msk, s, l, xt::all(), xt::all()) :
(m_cdt.size() > 0 ?
xt::view(c_msk, s, l, xt::all(), xt::all()) :
xt::view(t_msk, s, l, xt::all(), xt::all()));
eh::probabilist::Evaluator evaluator(
q_obs_v, q_prd_v, q_thr_v, t_msk_v, b_exp
);
// pre-compute required elt
for (const auto& element : req_elt)
{
if ( element == "o_k" )
evaluator.calc_o_k();
else if ( element == "bar_o" )
evaluator.calc_bar_o();
else if ( element == "y_k" )
evaluator.calc_y_k();
else if ( element == "q_qnt" )
evaluator.calc_q_qnt();
}
// pre-compute required dep
for (const auto& dependency : req_dep)
{
if ( dependency == "bs" )
evaluator.calc_bs();
else if ( dependency == "qs" )
evaluator.calc_qs();
else if ( dependency == "crps" )
evaluator.calc_crps();
}
// retrieve or compute requested metrics
for (int m = 0; m < metrics.size(); m++)
{
const auto& metric = metrics[m];
if ( metric == "BS" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_BS();
// (sites, lead times, subsets, samples, thresholds)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.BS, summary);
}
else if ( metric == "BSS" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_BSS();
// (sites, lead times, subsets, samples, thresholds)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.BSS, summary);
}
else if ( metric == "BS_CRD" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_BS_CRD();
// (sites, lead times, subsets, samples, thresholds, components)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.BS_CRD, summary);
}
else if ( metric == "BS_LBD" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_BS_LBD();
// (sites, lead times, subsets, samples, thresholds, components)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.BS_LBD, summary);
}
else if ( metric == "QS" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_QS();
// (sites, lead times, subsets, samples, quantiles)
xt::view(r[m], s, l, xt::all(), xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.QS, summary);
}
else if ( metric == "CRPS" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_CRPS();
// (sites, lead times, subsets, samples)
xt::view(r[m], s, l, xt::all(), xt::all()) =
eh::uncertainty::summarise(evaluator.CRPS, summary);
}
}
}
return r;
}
}
probabilist/evaluator.hpp:

-#ifndef EVALHYD_PROBABILIST_EVALUATOR_H
-#define EVALHYD_PROBABILIST_EVALUATOR_H
+#ifndef EVALHYD_PROBABILIST_EVALUATOR_HPP
+#define EVALHYD_PROBABILIST_EVALUATOR_HPP
#include <stdexcept>
#include <vector>
...
@@ -137,4 +137,4 @@ namespace evalhyd
    }
}
-#endif //EVALHYD_PROBABILIST_EVALUATOR_H
+#endif //EVALHYD_PROBABILIST_EVALUATOR_HPP
...
@@ -43,11 +43,11 @@ TEST(ProbabilistTests, TestBrier)
    xt::xtensor<double, 2> thresholds = {{690, 534, 445, NAN}};
    std::vector<xt::xarray<double>> metrics =
-       evalhyd::evalp(
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
            // shape: (sites [1], time [t])
-           xt::view(observed, xt::newaxis(), xt::all()),
+           xt::eval(xt::view(observed, xt::newaxis(), xt::all())),
            // shape: (sites [1], lead times [1], members [m], time [t])
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
            {"BS", "BSS", "BS_CRD", "BS_LBD"},
            thresholds
        );
...
@@ -101,11 +101,11 @@ TEST(ProbabilistTests, TestQuantiles)
    // compute scores
    std::vector<xt::xarray<double>> metrics =
-       evalhyd::evalp(
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
            // shape: (sites [1], time [t])
-           xt::view(observed, xt::newaxis(), xt::all()),
+           xt::eval(xt::view(observed, xt::newaxis(), xt::all())),
            // shape: (sites [1], lead times [1], members [m], time [t])
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
            {"QS", "CRPS"}
        );
...
@@ -139,7 +139,7 @@ TEST(ProbabilistTests, TestMasks)
    std::tie(observed, predicted) = load_data_p();
    // generate temporal subset by dropping 20 first time steps
-   xt::xtensor<double, 4> masks =
+   xt::xtensor<bool, 4> masks =
        xt::ones<bool>({std::size_t {1}, std::size_t {1}, std::size_t {1},
                        std::size_t {observed.size()}});
    xt::view(masks, 0, xt::all(), 0, xt::range(0, 20)) = 0;
...
@@ -150,11 +150,11 @@ TEST(ProbabilistTests, TestMasks)
        {"BS", "BSS", "BS_CRD", "BS_LBD", "QS", "CRPS"};
    std::vector<xt::xarray<double>> metrics_masked =
-       evalhyd::evalp(
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
            // shape: (sites [1], time [t])
-           xt::view(observed, xt::newaxis(), xt::all()),
+           xt::eval(xt::view(observed, xt::newaxis(), xt::all())),
            // shape: (sites [1], lead times [1], members [m], time [t])
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
            metrics,
            thresholds,
            // shape: (sites [1], lead times [1], subsets [1], time [t])
...
@@ -163,11 +163,11 @@ TEST(ProbabilistTests, TestMasks)
    // compute scores on pre-computed subset of whole record
    std::vector<xt::xarray<double>> metrics_subset =
-       evalhyd::evalp(
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
            // shape: (sites [1], time [t-20])
-           xt::view(observed, xt::newaxis(), xt::range(20, _)),
+           xt::eval(xt::view(observed, xt::newaxis(), xt::range(20, _))),
            // shape: (sites [1], lead times [1], members [m], time [t-20])
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::range(20, _)),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::range(20, _))),
            metrics,
            thresholds
        );
...
@@ -205,19 +205,22 @@ TEST(ProbabilistTests, TestMaskingConditions)
    };
    std::vector<xt::xarray<double>> metrics_q_conditioned =
-       evalhyd::evalp(
-           observed,
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
-           metrics, thresholds,
-           masks, q_conditions
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
+           xt::eval(observed),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
+           metrics,
+           thresholds,
+           masks,
+           q_conditions
        );
    // compute scores using "NaN-ed" time indices where conditions on streamflow met
    std::vector<xt::xarray<double>> metrics_q_preconditioned =
-       evalhyd::evalp(
-           xt::where((observed < 2000) | (observed > 3000), observed, NAN),
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
-           metrics, thresholds
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
+           xt::eval(xt::where((observed < 2000) | (observed > 3000), observed, NAN)),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
+           metrics,
+           thresholds
        );
    // check results are identical
...
@@ -241,19 +244,22 @@ TEST(ProbabilistTests, TestMaskingConditions)
    double median = xt::median(q_prd_mean);
    std::vector<xt::xarray<double>> metrics_q_conditioned_ =
-       evalhyd::evalp(
-           observed,
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
-           metrics, thresholds,
-           masks, q_conditions_
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
+           xt::eval(observed),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
+           metrics,
+           thresholds,
+           masks,
+           q_conditions_
        );
    // compute scores using "NaN-ed" time indices where conditions on streamflow met
    std::vector<xt::xarray<double>> metrics_q_preconditioned_ =
-       evalhyd::evalp(
-           xt::where(q_prd_mean >= median, observed, NAN),
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
-           metrics, thresholds
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
+           xt::eval(xt::where(q_prd_mean >= median, observed, NAN)),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
+           metrics,
+           thresholds
        );
    // check results are identical
...
@@ -274,19 +280,22 @@ TEST(ProbabilistTests, TestMaskingConditions)
    };
    std::vector<xt::xarray<double>> metrics_t_conditioned =
-       evalhyd::evalp(
-           observed,
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
-           metrics, thresholds,
-           masks, t_conditions
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
+           xt::eval(observed),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
+           metrics,
+           thresholds,
+           masks,
+           t_conditions
        );
    // compute scores on already subset time series
    std::vector<xt::xarray<double>> metrics_t_subset =
-       evalhyd::evalp(
-           xt::view(observed, xt::all(), xt::range(0, 100)),
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::range(0, 100)),
-           metrics, thresholds
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
+           xt::eval(xt::view(observed_, xt::newaxis(), xt::range(0, 100))),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::range(0, 100))),
+           metrics,
+           thresholds
        );
    // check results are identical
...
@@ -323,7 +332,7 @@ TEST(ProbabilistTests, TestMissingData)
            {{ 4.7, 4.3, NAN, 2.7, 4.1 }};
    std::vector<xt::xarray<double>> metrics_nan =
-       evalhyd::evalp(
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
            observed_nan,
            forecast_nan,
            metrics,
...
@@ -342,7 +351,7 @@ TEST(ProbabilistTests, TestMissingData)
            {{ 4.7, 4.3, 2.7 }};
    std::vector<xt::xarray<double>> metrics_pp1 =
-       evalhyd::evalp(
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
            observed_pp1,
            forecast_pp1,
            metrics,
...
@@ -360,7 +369,7 @@ TEST(ProbabilistTests, TestMissingData)
            {{ 4.3, 2.7, 4.1 }};
    std::vector<xt::xarray<double>> metrics_pp2 =
-       evalhyd::evalp(
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
            observed_pp2,
            forecast_pp2,
            metrics,
...
@@ -415,12 +424,12 @@ TEST(ProbabilistTests, TestBootstrap)
            {{"n_samples", 10}, {"len_sample", 3}, {"summary", 0}};
    std::vector<xt::xarray<double>> metrics_bts =
-       evalhyd::evalp(
-           xt::view(observed, xt::newaxis(), xt::all()),
-           xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
+           xt::eval(xt::view(observed, xt::newaxis(), xt::all())),
+           xt::eval(xt::view(predicted, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
            metrics,
            thresholds,
-           {}, // t_msk
+           xt::xtensor<bool, 4>({}), // t_msk
            {}, // m_cdt
            bootstrap,
            datetimes
...
@@ -438,9 +447,9 @@ TEST(ProbabilistTests, TestBootstrap)
            xt::concatenate(xt::xtuple(predicted, predicted, predicted), 1);
    std::vector<xt::xarray<double>> metrics_rep =
-       evalhyd::evalp(
-           xt::view(observed_x3, xt::newaxis(), xt::all()),
-           xt::view(predicted_x3, xt::newaxis(), xt::newaxis(), xt::all(), xt::all()),
+       evalhyd::evalp<xt::xtensor<double, 2>, xt::xtensor<double, 4>, xt::xtensor<bool, 4>>(
+           xt::eval(xt::view(observed_x3, xt::newaxis(), xt::all())),
+           xt::eval(xt::view(predicted_x3, xt::newaxis(), xt::newaxis(), xt::all(), xt::all())),
            metrics,
            thresholds
        );
...