Commit 5ec50320 authored by Thibault Hallouin

add metrics (RMSE, KGE, KGEPRIME)

Pipeline #37387 passed with stages in 8 seconds
Showing with 225 additions and 19 deletions
@@ -70,7 +70,7 @@ namespace evalhyd
// check that the metrics to be computed are valid
utils::check_metrics(
metrics,
{"NSE"}
{"RMSE", "NSE", "KGE", "KGEPRIME"}
);
// instantiate determinist evaluator
@@ -81,7 +81,12 @@ namespace evalhyd
std::unordered_map<std::string, std::vector<std::string>> dep;
// register potentially recurring computation elt across metrics
-// TODO
elt["RMSE"] = {"quad_err"};
elt["NSE"] = {"mean_obs", "quad_obs", "quad_err"};
elt["KGE"] = {"mean_obs", "mean_prd", "quad_obs", "quad_prd",
"r_pearson", "bias"};
elt["KGEPRIME"] = {"mean_obs", "mean_prd", "quad_obs", "quad_prd",
"r_pearson", "bias"};
// register nested metrics (i.e. metric dependent on another metric)
// TODO
@@ -95,7 +100,20 @@ namespace evalhyd
// pre-compute required elt
for ( const auto& element : req_elt )
{
-// TODO
if ( element == "mean_obs" )
evaluator.calc_mean_obs();
else if ( element == "mean_prd" )
evaluator.calc_mean_prd();
else if ( element == "quad_err" )
evaluator.calc_quad_err();
else if ( element == "quad_obs" )
evaluator.calc_quad_obs();
else if ( element == "quad_prd" )
evaluator.calc_quad_prd();
else if ( element == "r_pearson" )
evaluator.calc_r_pearson();
else if ( element == "bias" )
evaluator.calc_bias();
}
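// note: a hypothetical alternative (not part of this commit) would be a
// table-driven dispatch mapping element names to member function pointers,
// assuming the evaluator is an Evaluator<A> instance:
//     static const std::unordered_map<std::string, void (Evaluator<A>::*)()>
//         calc_elt = {{"mean_obs", &Evaluator<A>::calc_mean_obs}, /* etc. */};
//     for (const auto& element : req_elt)
//         (evaluator.*calc_elt.at(element))();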
// pre-compute required dep
@@ -109,13 +127,34 @@ namespace evalhyd
for ( const auto& metric : metrics )
{
if ( metric == "NSE" )
if ( metric == "RMSE" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_RMSE();
r.emplace_back(evaluator.RMSE);
}
else if ( metric == "NSE" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_NSE();
r.emplace_back(evaluator.NSE);
}
else if ( metric == "KGE" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_KGE();
r.emplace_back(evaluator.KGE);
}
else if ( metric == "KGEPRIME" )
{
if (std::find(req_dep.begin(), req_dep.end(), metric)
== req_dep.end())
evaluator.calc_KGEPRIME();
r.emplace_back(evaluator.KGEPRIME);
}
}
return r;
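For reference, a caller-side sketch of how the extended metric list can be requested through the public entry point, mirroring the call used in the tests further down; the observed and predicted variable names and their shapes are placeholders, not part of this commit:

    // sketch only: assumes observed (shape (1, time)) and predicted
    // (shape (series, time)) are already loaded as 2D xtensors
    std::vector<xt::xarray<double>> results =
        evalhyd::evald<xt::xtensor<double, 2>>(
            observed, predicted, {"RMSE", "NSE", "KGE", "KGEPRIME"}
        );
    // results[i] holds the values of the i-th requested metric, shape (series, 1)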
@@ -16,7 +16,13 @@ namespace evalhyd
const A& q_obs;
const A& q_prd;
// members for computational elements
-// TODO
A mean_obs;
A mean_prd;
A quad_err;
A quad_obs;
A quad_prd;
A r_pearson;
A bias;
public:
// constructor method
@@ -35,13 +41,86 @@ namespace evalhyd
}
};
// members for evaluation metrics
A RMSE;
A NSE;
A KGE;
A KGEPRIME;
// methods to compute elements
-// TODO
void calc_mean_obs();
void calc_mean_prd();
void calc_quad_err();
void calc_quad_obs();
void calc_quad_prd();
void calc_r_pearson();
void calc_bias();
// methods to compute metrics
void calc_RMSE();
void calc_NSE();
void calc_KGE();
void calc_KGEPRIME();
};
template <class A>
void Evaluator<A>::calc_mean_obs()
{
mean_obs = xt::mean(q_obs, -1, xt::keep_dims);
}
template <class A>
void Evaluator<A>::calc_mean_prd()
{
mean_prd = xt::mean(q_prd, -1, xt::keep_dims);
}
template <class A>
void Evaluator<A>::calc_quad_err()
{
quad_err = xt::square(q_obs - q_prd);
}
template <class A>
void Evaluator<A>::calc_quad_obs()
{
quad_obs = xt::square(q_obs - mean_obs);
}
template <class A>
void Evaluator<A>::calc_quad_prd()
{
quad_prd = xt::square(q_prd - mean_prd);
}
template <class A>
void Evaluator<A>::calc_r_pearson()
{
// calculate error in timing and dynamics $r_{pearson}$
// (Pearson's correlation coefficient)
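// $r_{pearson} = \frac{\sum_t (q_{prd,t} - \mu_{prd})(q_{obs,t} - \mu_{obs})}{\sqrt{\sum_t (q_{prd,t} - \mu_{prd})^2 \sum_t (q_{obs,t} - \mu_{obs})^2}}$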
auto r_num = xt::sum(
(q_prd - mean_prd) * (q_obs - mean_obs), -1, xt::keep_dims
);
auto r_den = xt::sqrt(
xt::sum(quad_prd, -1, xt::keep_dims)
* xt::sum(quad_obs, -1, xt::keep_dims)
);
r_pearson = r_num / r_den;
}
template <class A>
void Evaluator<A>::calc_bias()
{
// calculate $bias$
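// $\beta = \frac{\sum_t q_{prd,t}}{\sum_t q_{obs,t}} = \frac{\mu_{prd}}{\mu_{obs}}$ (ratio of means, i.e. overall bias)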
bias = xt::sum(q_prd, -1, xt::keep_dims)
/ xt::sum(q_obs, -1, xt::keep_dims);
}
template <class A>
void Evaluator<A>::calc_RMSE()
{
// compute RMSE
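// $RMSE = \sqrt{\frac{1}{n} \sum_{t=1}^{n} (q_{obs,t} - q_{prd,t})^2}$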
RMSE = xt::sqrt(xt::mean(quad_err, -1, xt::keep_dims));
}
// Compute the Nash-Sutcliffe Efficiency (NSE).
//
// If multi-dimensional arrays are provided, the arrays of predictions
@@ -55,22 +134,51 @@ namespace evalhyd
// \require q_prd:
// Array of streamflow predictions.
// shape: (..., time)
-// \assign nse
+// \assign nse:
// Array of computed Nash-Sutcliffe efficiencies.
-// shape: (...)
+// shape: (..., 1)
template <class A>
void Evaluator<A>::calc_NSE()
{
-// compute average observed flow
-A q_avg = xt::mean(q_obs, -1, xt::keep_dims);
// compute squared errors operands
-A f_num = xt::sum(xt::square(q_obs - q_prd), -1, xt::keep_dims);
-A f_den = xt::sum(xt::square(q_obs - q_avg), -1, xt::keep_dims);
+A f_num = xt::sum(quad_err, -1, xt::keep_dims);
+A f_den = xt::sum(quad_obs, -1, xt::keep_dims);
-// return computed NSE
+// compute NSE
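// $NSE = 1 - \frac{\sum_t (q_{obs,t} - q_{prd,t})^2}{\sum_t (q_{obs,t} - \mu_{obs})^2}$ (Nash & Sutcliffe, 1970)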
NSE = 1 - (f_num / f_den);
}
template <class A>
void Evaluator<A>::calc_KGE()
{
// calculate error in spread of flow $alpha$
auto alpha =
xt::stddev(q_prd, {q_prd.dimension() - 1}, xt::keep_dims)
/ xt::stddev(q_obs, {q_obs.dimension() - 1}, xt::keep_dims);
// compute KGE
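// $KGE = 1 - \sqrt{(r_{pearson} - 1)^2 + (\alpha - 1)^2 + (\beta - 1)^2}$
// with $\alpha = \sigma_{prd} / \sigma_{obs}$ and $\beta = \mu_{prd} / \mu_{obs}$
// (Gupta et al., 2009)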
KGE = 1 - xt::sqrt(
xt::square(r_pearson - 1)
+ xt::square(alpha - 1)
+ xt::square(bias - 1)
);
}
template <class A>
void Evaluator<A>::calc_KGEPRIME()
{
// calculate error in relative variability $gamma$
// (ratio of coefficients of variation)
auto gamma =
(xt::stddev(q_prd, {q_prd.dimension() - 1}, xt::keep_dims) / mean_prd)
/ (xt::stddev(q_obs, {q_obs.dimension() - 1}, xt::keep_dims) / mean_obs);
// compute KGE'
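// $KGE' = 1 - \sqrt{(r_{pearson} - 1)^2 + (\gamma - 1)^2 + (\beta - 1)^2}$
// with $\gamma = \frac{\sigma_{prd} / \mu_{prd}}{\sigma_{obs} / \mu_{obs}}$ (Kling et al., 2012)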
KGEPRIME = 1 - xt::sqrt(
xt::square(r_pearson - 1)
+ xt::square(gamma - 1)
+ xt::square(bias - 1)
);
}
}
}
@@ -8,7 +8,7 @@
#include "evalhyd/evald.hpp"
-TEST(DeterministTests, TestNSE) {
+TEST(DeterministTests, TestNSE1D2D) {
// read in data
std::ifstream ifs;
ifs.open("./data/q_obs.csv");
@@ -37,10 +37,69 @@ TEST(DeterministTests, TestNSE) {
);
// check results (both with 2D and 1D tensors)
-xt::xtensor<double, 1> nse_2d =
-{0.71891219, 0.7190249, 0.71835777, 0.71810361, 0.71776748};
-EXPECT_TRUE(xt::allclose(metrics_2d[0], nse_2d));
+xt::xtensor<double, 2> nse =
+{{0.71891219},
+ {0.7190249},
+ {0.71835777},
+ {0.71810361},
+ {0.71776748}};
+EXPECT_TRUE(xt::allclose(metrics_2d[0], nse));
-xt::xtensor<double, 1> nse_1d = {0.71891219};
-EXPECT_TRUE(xt::allclose(metrics_1d[0], nse_1d));
+EXPECT_TRUE(xt::allclose(metrics_1d[0], nse[0]));
}
TEST(DeterministTests, TestMetrics) {
// read in data
std::ifstream ifs;
ifs.open("./data/q_obs.csv");
xt::xtensor<double, 2> observed = xt::load_csv<int>(ifs);
ifs.close();
ifs.open("./data/q_prd.csv");
xt::xtensor<double, 2> predicted_2d = xt::view(
xt::load_csv<double>(ifs), xt::range(0, 5), xt::all()
);
ifs.close();
xt::xtensor<double, 1> predicted_1d = xt::row(predicted_2d, 0);
// compute scores with 2D tensors
std::vector<xt::xarray<double>> metrics =
evalhyd::evald<xt::xtensor<double, 2>>(
observed, predicted_2d, {"RMSE", "NSE", "KGE", "KGEPRIME"}
);
// check results on all metrics
xt::xtensor<double, 2> rmse =
{{777.034272},
{776.878479},
{777.800217},
{778.151082},
{778.61487}};
EXPECT_TRUE(xt::allclose(metrics[0], rmse));
xt::xtensor<double, 2> nse =
{{0.718912},
{0.719025},
{0.718358},
{0.718104},
{0.717767}};
EXPECT_TRUE(xt::allclose(metrics[1], nse));
xt::xtensor<double, 2> kge =
{{0.748088},
{0.746106},
{0.744111},
{0.743011},
{0.741768}};
EXPECT_TRUE(xt::allclose(metrics[2], kge));
xt::xtensor<double, 2> kgeprime =
{{0.813141},
{0.812775},
{0.812032},
{0.811787},
{0.811387}};
EXPECT_TRUE(xt::allclose(metrics[3], kgeprime));
}
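As a sanity check, the first RMSE value above can be recomputed independently with xtensor; the snippet below is a hypothetical illustration, not part of the commit (it assumes <cmath> and the xtensor view header are available in the test file):

    // hypothetical cross-check of the first RMSE value, bypassing evalhyd
    xt::xtensor<double, 1> obs0 = xt::row(observed, 0);
    xt::xtensor<double, 1> prd0 = xt::row(predicted_2d, 0);
    double rmse0 = std::sqrt(xt::mean(xt::square(obs0 - prd0))());
    EXPECT_NEAR(rmse0, 777.034272, 1e-2);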