Commit 185c867a authored by Poulet Camille


Merge branch 'SpawnerRunAnalysis' of gitlab-ssh.irstea.fr:SimAquaLife/GR3D into exploration_GR3D_process
parents adc32209 65a76167
......@@ -25,6 +25,8 @@
<temperatureRiverFile>data/input/northeastamerica/observed_river_temperatures.csv</temperatureRiverFile>
<verboseCorrespondance>false</verboseCorrespondance>
<simulationName>easyRun</simulationName>
<useRealPDam>false</useRealPDam>
</environment.RIOBasinNetworkWithContinent>
......@@ -360,7 +360,7 @@
<epsilon>0.001</epsilon>
</analysis.AnalyseLikelihoodOfPresence>
<analysis.ExportExchangeSBetweenCatchments>
<analysis.ExportExchangesBetweenCatchments>
<synchronisationMode>ASYNCHRONOUS</synchronisationMode>
<consoleDisplay>false</consoleDisplay>
<years>
......@@ -368,7 +368,7 @@
</years>
<fluxesSeason>SPRING</fluxesSeason>
<fileNameOutput>effectiveFluxes</fileNameOutput>
</analysis.ExportExchangeSBetweenCatchments>
</analysis.ExportExchangesBetweenCatchments>
</processesAtEnd>
</processes>
<useCemetery>false</useCemetery>
......
......@@ -353,14 +353,14 @@
<migrationSeasonToReachSummeringOffshore>SPRING</migrationSeasonToReachSummeringOffshore>
</species.MigrateBetweenOffshores>
<analysis.ExportExchangeSBetweenCatchments>
<analysis.ExportExchangesBetweenCatchments>
<synchronisationMode>ASYNCHRONOUS</synchronisationMode>
<consoleDisplay>false</consoleDisplay>
<yearStart>1920</yearStart>
<yearEnd>1950</yearEnd>
<fluxesSeason>SPRING</fluxesSeason>
<fileNameOutput>effectiveFluxes</fileNameOutput>
</analysis.ExportExchangeSBetweenCatchments>
<fileNameOutput></fileNameOutput>
</analysis.ExportExchangesBetweenCatchments>
<environment.updateTemperatureInRIOBasin>
<synchronisationMode>ASYNCHRONOUS</synchronisationMode>
......@@ -375,7 +375,6 @@
<minimumRecruitsForPopulatedBasin>50</minimumRecruitsForPopulatedBasin>
<epsilon>0.001</epsilon>
</analysis.AnalyseLikelihoodOfPresence>
</processesAtEnd>
</processes>
<useCemetery>false</useCemetery>
......
......@@ -360,7 +360,7 @@
<epsilon>0.001</epsilon>
</analysis.AnalyseLikelihoodOfPresence>
<analysis.ExportExchangeSBetweenCatchments>
<analysis.ExportExchangesBetweenCatchments>
<synchronisationMode>ASYNCHRONOUS</synchronisationMode>
<consoleDisplay>false</consoleDisplay>
<years>
......@@ -368,7 +368,7 @@
</years>
<fluxesSeason>SPRING</fluxesSeason>
<fileNameOutput>effectiveFluxes</fileNameOutput>
</analysis.ExportExchangeSBetweenCatchments>
</analysis.ExportExchangesBetweenCatchments>
</processesAtEnd>
</processes>
<useCemetery>false</useCemetery>
......
......@@ -360,7 +360,7 @@
<epsilon>0.001</epsilon>
</analysis.AnalyseLikelihoodOfPresence>
<analysis.ExportExchangeSBetweenCatchments>
<analysis.ExportExchangesBetweenCatchments>
<synchronisationMode>ASYNCHRONOUS</synchronisationMode>
<consoleDisplay>false</consoleDisplay>
<years>
......@@ -368,7 +368,7 @@
</years>
<fluxesSeason>SPRING</fluxesSeason>
<fileNameOutput>effectiveFluxes</fileNameOutput>
</analysis.ExportExchangeSBetweenCatchments>
</analysis.ExportExchangesBetweenCatchments>
</processesAtEnd>
</processes>
<useCemetery>false</useCemetery>
......
......@@ -360,7 +360,7 @@
<epsilon>0.001</epsilon>
</analysis.AnalyseLikelihoodOfPresence>
<analysis.ExportExchangeSBetweenCatchments>
<analysis.ExportExchangesBetweenCatchments>
<synchronisationMode>ASYNCHRONOUS</synchronisationMode>
<consoleDisplay>false</consoleDisplay>
<years>
......@@ -368,7 +368,7 @@
</years>
<fluxesSeason>SPRING</fluxesSeason>
<fileNameOutput>effectiveFluxes</fileNameOutput>
</analysis.ExportExchangeSBetweenCatchments>
</analysis.ExportExchangesBetweenCatchments>
</processesAtEnd>
</processes>
<useCemetery>false</useCemetery>
......
library(tidyverse)
library(rJava) # see J4R
#library(yaml)
library(tidyverse)
......@@ -5,40 +6,90 @@ library(jdx)
rm(list = ls())
# # parameters and priors of parameters to be fitted
# inputData = yaml.load_file("ABC_seq_2param_3stats_NewAfterSA.yaml")
# # load prior
# prior = list()
# for (i in 1:length(inputData$parameters)) {
# p = inputData$parameters[[i]]
# prior[[i]] = unlist(p$prior)
# }
#
# parametersNames = list()
# for (i in 1:length(inputData$parameters)) {
# parametersNames = c(parametersNames, inputData$parameters[[i]]$javapath)
# }
# parametersNames = unlist(parametersNames)
# GR3D arguments -----------------------------------------
## to have the same working directory as GR3D
#setwd("~/Documents/workspace/GR3D/exploration/NEA_sensitivity_analysis")
setwd("../..")
# path to outputs
#outputDir = "simus/"
#outputFileName = paste0("SA/",seed)
# ====================================== LOCAL FUNCTIONS =====================================
convertFromSALtoR = function(java_out){
require(jdx)
# headers
headers = convertToR(java_out[[1]])
# data
R_out = lapply(seq(2, length(java_out)), FUN = function(i){convertToR(java_out[[i]])}) %>%
unlist() %>%
matrix(nrow = length(java_out) - 1, byrow = TRUE, dimnames = list(NULL, headers) ) %>%
data.frame() %>%
{ suppressMessages(type_convert(.)) }
return(R_out)
}
runEasyRun = function(arguments, parametersNamesANG,thetasANG, parametersNamesENV, thetasENV) {
# run the GR3D with updated parameter values in processes --------------------------------------------
.jcall("analysis.EasyRun", "V", "runSimulation", arguments,
.jarray(parametersNamesANG), .jarray(thetasANG),
.jarray(parametersNamesENV), .jarray(thetasENV))
}
exploreEasyRun = function(){
# call GR3D method to get model outputs ----------------------------------------------
# 1. northernmost populated basin_id
# 2. centroid latitude
out_range = .jcall("analysis.EasyRun","[[Ljava/lang/String;","getValuesFromAquanismGroupProcess","processes.processesEachStep.10.exportToR")
range = convertFromSALtoR(out_range)
# 3. log-likelihood ----
# the proportion of reproductions leading to a recruitment > 50 over the 1920-1950 period was used
# as a proxy of predicted probability of presence in a basin.
# Then a log-likelihood of a binomial distribution is computed
logL = .jcall("analysis.EasyRun", "D", "getSingleValueFromAquanismGroupProcess", "processes.processesAtEnd.0.getLogLikelihood");
# 4. proportion of first-time spawners
# annual % of first-time spawners according to latitude (or basin_id)
# between 1920-1950
# then in R: annual slope of the linear regression
# and average over the period
# 5. mean age of female and male spawners
# annual mean age in each basin,
# then in R: average per year,
# and average over the period (1920 and 1950)
out_spawners_features = .jcall("analysis.EasyRun","[[Ljava/lang/String;","getValuesFromAquanismGroupProcess","processes.processesEachStep.9.exportToR")
spawners_features = convertFromSALtoR(out_spawners_features)
## HERE: compute the slope of the mean-age regression line (see the sketch below)
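# --- minimal sketch (not in the original script) for the slope asked for above:
# it assumes spawners_features holds one row per basin with the latitude,
# mean_age_female and pct_primiparous columns written by
# AnalyseSpawnerFeatures.exportToR; only the period-averaged slope is
# available from that single export
age_slope = coef(lm(mean_age_female ~ latitude, data = spawners_features))["latitude"]
primiparous_slope = coef(lm(pct_primiparous ~ latitude, data = spawners_features))["latitude"]
# these two values could be appended to the list returned below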
# ------------------------------------------------------------ #
# 6. number of metapopulations based on the exchange matrix ----
# mean annual exchange matrix between 1920 and 1950
out_exchanges = .jcall("analysis.EasyRun","[[Ljava/lang/String;","getValuesFromAquanismGroupProcess","processes.processesEachStep.16.exportToR")
exchanges = convertFromSALtoR(out_exchanges)
## HERE: run metapopulationIdentification (see the exchange-matrix sketch at the end of the script)
return(list(northern_basin_id = range$northern_basin_id,
effective_centroid_latitude = range$effective_centroid_latitude,
logL = logL,
spawners_features = spawners_features,
exchanges = exchanges))
}
# GR3D arguments -----------------------------------------
jarfile = "target/GR3D-3.2-SNAPSHOT.jar"
simDuration = 150
simBegin = 1
timeStepDuration = 1
seed = 1
arguments = c('-q','-simDuration', simDuration, '-simBegin',simBegin,
#'-q',
arguments = c('-simDuration', simDuration, '-simBegin',simBegin,
'-timeStepDuration', timeStepDuration,
'-RNGStatusIndex', format(seed,scientific = FALSE),
'-groups',"data/input/northeastamerica/fishRIOBasin_Sapidissima_Rjava.xml",
......@@ -47,111 +98,66 @@ arguments = c('-q','-simDuration', simDuration, '-simBegin',simBegin,
# -----------------------------------------------------
# initializes the Java Virtual Machine -------------------------------------
.jinit(classpath ="", force.init = TRUE)
.jinit(NULL, force.init = TRUE)
.jinit(classpath = jarfile, force.init = TRUE)
.jclassPath()
# modification of xlm parameters -----------------------
parametersNamesANG = c("processes.processesEachStep.11.Soffset")
thetasANG = c(0)
thetasANG = c(-1)
parametersNamesENV = c("simulationName")
thetasENV = c("noAllee")
runEasyRun = function(arguments, parametersNamesANG,thetasANG, parametersNamesENV, thetasENV ){
# run the GR3D with updated parameter values in processes --------------------------------------------
.jclassPath()
.jcall("analysis.EasyRun", "V", "runSimulation", arguments,
.jarray(parametersNamesANG), .jarray(thetasANG),
.jarray(parametersNamesENV), .jarray(thetasENV))
# call GR3D method to get model outputs ----------------------------------------------
#1. northern limit and distribution centroid ----
out = .jcall("analysis.EasyRun","[[Ljava/lang/String;","getValuesFromAquanismGroupProcess","processes.processesEachStep.10.exportToR")
range <- data.frame(matrix(convertToR(out[[2]]), nrow = 1, dimnames = list(NULL, convertToR(out[[1]])))) %>% type_convert()
return(list(range = range))
}
thetasENV = c("Allee")
# run the GR3D with updated parameter values in processes --------------------------------------------
runEasyRun(arguments, parametersNamesANG,thetasANG, parametersNamesENV, thetasENV )
# run the GR3D with updated parameter values in processes --------------------------------------------
.jclassPath()
.jcall("analysis.EasyRun", "V", "runSimulation", arguments,
.jarray(parametersNamesANG), .jarray(thetasANG),
.jarray(parametersNamesENV), .jarray(thetasENV))
# .jcall("analysis.EasyRun", "V", "runSimulation", arguments,
# .jarray(parametersNamesANG), .jarray(thetasANG),
# .jarray(parametersNamesENV), .jarray(thetasENV))
# call GR3D method to get model outputs ----------------------------------------------
#TODO export [char] and then data.frame(toto) %>% type_convert()
# ----------------------------------------------------------------------------------#
# 1. northern limit and distribution centroid ----
# only basins with more than 50 recruits on average in the last 50 years (colonised basins)
# compute only in SPRING
# access through getRangeDistribution in DiadromousFishGroup ==== NOT CORRECT
# range[1] = latitude of the median of the distribution (50% of individuals are further north)
# range[2] = southern limit latitude of the fish distribution
out = .jcall("analysis.EasyRun","[[Ljava/lang/String;","getValuesFromAquanismGroupProcess","processes.processesEachStep.10.exportToR")
range <- data.frame(matrix(convertToR(out[[2]]), nrow = 1, dimnames = list(NULL, convertToR(out[[1]])))) %>% type_convert()
# ------------------------------------------------------------ #
# 2. log-likelihood ----
# the proportion of reproductions leading to a recruitment > 50 over the 1920-1950 period was used
# as a proxy of predicted probability of presence in a basin.
# Then a log-likelihood of a binomial distribution is computed
.jcall("analysis.EasyRun", "D", "getSingleValueFromAquanismGroupProcess", "processes.processesAtEnd.0.getLogLikelihood");
# ------------------------------------------------------------ #
# 3. proportion of first-time spawners
# annual % of first-time spawners according to latitude (or basin_id)
# between 1920-1950
# then in R: annual slope of the linear regression
# and average over the period
# 4. Mean age of male and female spawners ----
# annual mean age in each basin,
# then in R: average per year,
# and average over the period (1920 and 1950)
# mean (+sd) and slope after
out = .jcall("analysis.EasyRun","[[Ljava/lang/String;","getValuesFromAquanismGroupProcess","processes.processesEachStep.8.exportToR")
convertFromSALtoR = function(java_out){
require(jdx)
out2 = matrix(unlist(lapply(java_out, convertToR)), nrow = length(java_out) , byrow = TRUE)
out3 <- out2[-1,]
colnames(out3) <- unlist(out2[1,])
R_out <- as.data.frame(out3) %>% type_convert()
return(R_out)
}
res1 = exploreEasyRun()
res1$spawners_features
#
listeSimulation <- list(list(replicat = 1,
parametersNamesANG = c("processes.processesEachStep.11.Soffset"),
thetasANG = c(-1),
parametersNamesENV = c("simulationName"),
thetasENV = c("Allee")),
list(replicat = 1,
parametersNamesANG = c("processes.processesEachStep.11.Soffset"),
thetasANG = c(0),
parametersNamesENV = c("simulationName"),
thetasENV = c("no Allee")))
for (i in 1:length(listeSimulation)) {
arguments = c('-simDuration', simDuration, '-simBegin',simBegin,
'-timeStepDuration', timeStepDuration,
'-RNGStatusIndex', format(listeSimulation[[i]]$replicat,scientific = FALSE),
'-groups',"data/input/northeastamerica/fishRIOBasin_Sapidissima_Rjava.xml",
'-env',"data/input/northeastamerica/RIOBNneaBasins_Rjava.xml",
'-observers',"data/input/northeastamerica/RIO_obs_empty.xml")
parametersNamesANG = listeSimulation[[i]]$parametersNamesANG
thetasANG = listeSimulation[[i]]$thetasANG
parametersNamesENV = listeSimulation[[i]]$parametersNamesENV
thetasENV = listeSimulation[[i]]$thetasENV
runEasyRun(arguments,
parametersNamesANG,
thetasANG,
parametersNamesENV ,
thetasENV )
res1 = exploreEasyRun()
cat(res1$effective_centroid_latitude, "\n")
}
convertFromSALtoR(java_out = out)
# ------------------------------------------------------------ #
# ------------------------------------------------------------ #
# 5. number of metapopulations based on the exchange matrix ----
# annual exchange matrix between 1920 and 1950
# then in R: compute average matrix over the period,
# run metapopulationIdentification
# average over river basins of the period-averages of the mean fluxes
# access through species.IdentifyPopulation in processesAtEnd
#TODO still to be done (a sketch follows below)
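# --- minimal sketch (not in the original script) of the averaging step described
# above; it assumes the exchanges table returned by exploreEasyRun() is in long
# format with year, migrationBasin, natalBasin and effective columns, matching
# the CSV header written by ExportExchangesBetweenCatchments;
# metapopulationIdentification itself is not sketched here
meanExchangeMatrix <- function(exchanges) {
  exchanges %>%
    group_by(migrationBasin, natalBasin) %>%
    summarise(mean_effective = mean(effective), .groups = "drop") %>%
    pivot_wider(names_from = natalBasin, values_from = mean_effective)
}
# e.g. meanExchangeMatrix(res1$exchanges) before running metapopulationIdentification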
# =================================================================================
toto = listeSimulation[[i]]$parametersNamesANG
......@@ -322,21 +322,23 @@ public class AnalyseSpawnerFeatures extends AquaNismsGroupProcess<DiadromousFish
public String[][] exportToR() {
int nbBasin = ageOfFemaleMemories.size();
String[][] result = new String[nbBasin + 1][5];
String[][] result = new String[nbBasin + 1][6];
// headers
result[0][0] = "basin_id";
result[0][1] = "basin_name";
result[0][2] = "mean_age_female"; // primiparous and multiparous
result[0][3] = "mean_age _male"; // primiparous and multiparous
result[0][4] = "pct_primiparous";
result[0][2] = "latitude";
result[0][3] = "mean_age_female"; // primiparous and multiparous
result[0][4] = "mean_age _male"; // primiparous and multiparous
result[0][5] = "pct_primiparous";
int i = 1;
for (Entry<RiverBasin, QueueMemory<Double>> entry : ageOfFemaleMemories.entrySet()) {
result[i][0] = String.valueOf(entry.getKey().getBasin_id());
result[i][1] = entry.getKey().getName();
result[i][2] = String.valueOf(entry.getValue().getMean());
result[i][3] = String.valueOf(ageOfMaleMemories.get(entry.getKey()).getMean());
result[i][4] = String.valueOf(primiparousMemories.get(entry.getKey()).getMean());
result[i][2] = String.valueOf(entry.getKey().getLatitude());
result[i][3] = String.valueOf(entry.getValue().getMean());
result[i][4] = String.valueOf(ageOfMaleMemories.get(entry.getKey()).getMean());
result[i][5] = String.valueOf(primiparousMemories.get(entry.getKey()).getMean());
i++;
}
......
......@@ -118,7 +118,7 @@ public class EasyRun {
public static void main(String[] args) throws Exception {
// blanks are important (the argument string below is split on spaces)
String[] batchArguments = ("-simDuration 40 -simBegin 1 -timeStepDuration 1 -RNGStatusIndex 1 "
String[] batchArguments = ("-simDuration 100 -simBegin 1 -timeStepDuration 1 -RNGStatusIndex 1 "
+ "-groups data/input/northeastamerica/fishRIOBasin_Sapidissima_Rjava.xml" + " "
+ "-env data/input/northeastamerica/RIOBNneaBasins_Rjava.xml" + " "
+ "-observers data/input/northeastamerica/RIO_obs_empty.xml").split("\\ ");
......@@ -156,12 +156,13 @@ public class EasyRun {
System.out.println("\n== AnalyseLikelihoodOfPresence ==");
System.out.println(getSingleValueFromAquanismGroupProcess("processes.processesAtEnd.0.getLogLikelihood"));
// List truc = getAListFromAquanismGroupProcess("processes.processesAtEnd.0.getRecords");
// System.out.println(truc.get(0).toString());
// System.out.println(getAListFromAquanismGroupProcess("processes.processesAtEnd.1.getRecords"));
// System.out.println("\ngetMeanLastPercOfAut");
// System.out.println(Arrays.toString(getValuesFromEnvironment("getMeanLastPercOfAut")));
System.out.println("\n== ExportExchangeSBetweenCatchments ==");
String[][] exchangeSBetweenCatchments = getValuesFromAquanismGroupProcess("processes.processesEachStep.16.exportToR");
for (String[] record : exchangeSBetweenCatchments) {
for (String value : record)
System.out.print(value + "\t");
System.out.println();
}
}
}
......@@ -24,14 +24,18 @@ import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.io.Serializable;
import java.util.Hashtable;
import java.util.Map;
import com.thoughtworks.xstream.XStream;
import com.thoughtworks.xstream.io.xml.DomDriver;
import environment.RIOBasinNetwork;
import environment.RiverBasin;
import environment.Time;
import environment.Time.Season;
import fr.cemagref.simaqualife.kernel.processes.AquaNismsGroupProcess;
import fr.cemagref.simaqualife.pilot.Pilot;
import observer.ObservableRecord;
import species.DiadromousFish;
import species.DiadromousFishGroup;
......@@ -39,7 +43,7 @@ import species.DiadromousFishGroup;
/**
*
*/
public class ExportExchangeSBetweenCatchments extends AquaNismsGroupProcess<DiadromousFish, DiadromousFishGroup> {
public class ExportExchangesBetweenCatchments extends AquaNismsGroupProcess<DiadromousFish, DiadromousFishGroup> {
/**
*
......@@ -111,38 +115,60 @@ public class ExportExchangeSBetweenCatchments extends AquaNismsGroupProcess<Diad
private Season fluxesSeason = Season.SPRING;
private String fileNameOutput = "effectiveFluxes";
private transient RiverBasin[] riverBasins;
private transient BufferedWriter bW;
private transient String sep;
/**
* 1st key: migration basin; 2nd key: natal basin; 3rd key: year; value: effective
*
* @unit
*/
private transient Map<RiverBasin, Map<RiverBasin, Map<Long, Double>>> exchanges;
public static void main(String[] args) {
System.out.println((new XStream(new DomDriver())).toXML(new ExportExchangeSBetweenCatchments()));
System.out.println((new XStream(new DomDriver())).toXML(new ExportExchangesBetweenCatchments()));
}
//
// @Override
// public void initTransientParameters(Pilot pilot) {
// super.initTransientParameters(pilot);
// // create output file if necessary
// if (fileNameOutput != null) {
// sep = ",";
//
// String outputPath = ((DiadromousFishGroup)
// pilot.getAquaticWorld().getAquaNismsGroupsList().get(0)).getOutputPath();
// String simulationId = ((DiadromousFishGroup) pilot.getAquaticWorld().getAquaNismsGroupsList().get(0))
// .getSimulationId();
// try {
// new File(outputPath + fileNameOutput).getParentFile().mkdirs();
// bW = new BufferedWriter(new FileWriter(new File(outputPath + fileNameOutput + simulationId + ".csv")));
//
// bW.write("year" + sep + "destinationBasin" + sep + "originBasin" + sep + "effective");
// bW.write("\n");
// } catch (IOException e) {
// e.printStackTrace();
// }
// }
//
// }
@Override
public void initTransientParameters(Pilot pilot) {
super.initTransientParameters(pilot);
riverBasins = ((RIOBasinNetwork) pilot.getAquaticWorld().getEnvironment()).getRiverBasins();
exchanges = new Hashtable<RiverBasin, Map<RiverBasin, Map<Long, Double>>>();
for (RiverBasin migrationBasin : riverBasins) {
Map<RiverBasin, Map<Long, Double>> natal_yearEffective = new Hashtable<RiverBasin, Map<Long, Double>>();
for (RiverBasin natalBasin : riverBasins) {
Map<Long, Double> year_effective = new Hashtable<Long, Double>();
for (Long year = yearStart; year <= yearEnd; year++) {
year_effective.put(year, 0.);
}
natal_yearEffective.put(natalBasin, year_effective);
}
exchanges.put(migrationBasin, natal_yearEffective);
}
// create output file if necessary
if (fileNameOutput != null) {
sep = ",";
String outputPath = ((DiadromousFishGroup) pilot.getAquaticWorld().getAquaNismsGroupsList().get(0)).getOutputPath();
String simulationId = ((RIOBasinNetwork) pilot.getAquaticWorld().getEnvironment()).getSimulationId();
try {
new File(outputPath + fileNameOutput).getParentFile().mkdirs();
bW = new BufferedWriter(new FileWriter(new File(outputPath + fileNameOutput + simulationId + ".csv")));
bW.write("year" + sep + "destinationBasin" + sep + "originBasin" + sep + "effective");
bW.write("\n");
} catch (IOException e) {
e.printStackTrace();
}
}
}
/*
......@@ -153,46 +179,30 @@ public class ExportExchangeSBetweenCatchments extends AquaNismsGroupProcess<Diad
@Override
public void doProcess(DiadromousFishGroup group) {
// write header in the file
if (bW == null) {
if (fileNameOutput != null) {
sep = ",";
try {
new File(group.getOutputPath() + fileNameOutput).getParentFile().mkdirs();
bW = new BufferedWriter(new FileWriter(new File(
group.getOutputPath() + fileNameOutput + group.getEnvironment().getSimulationId() + ".csv")));
bW.write("year" + sep + "migrationBasin" + sep + "natalBasin" + sep + "effective");
bW.write("\n");
} catch (IOException e) {
e.printStackTrace();
}
}
}
if (exchanges == null)
initTransientParameters(group.getPilot());
Time time = group.getEnvironment().getTime();
if (time.getYear(group.getPilot()) >= yearStart && time.getYear(group.getPilot()) <= yearEnd
&& time.getSeason(group.getPilot()) == fluxesSeason) {
String[] basinNames = group.getEnvironment().getRiverBasinNames();
long year = time.getYear(group.getPilot());