Commit 2e7db5ce authored by Heraut Louis's avatar Heraut Louis

DEB: new name, new computation, foot note

parent fe31f356
Showing with 167 additions and 190 deletions
@@ -135,8 +135,9 @@ datasheet_layout = function (df_data, df_meta, layout_matrix,
type='', trend_period=NULL,
mean_period=NULL, axis_xlim=NULL,
missRect=FALSE, time_header=NULL,
info_header=TRUE, info_ratio=1,
time_ratio=2, var_ratio=3,
info_header=TRUE, foot_note=FALSE,
info_ratio=1, time_ratio=2,
var_ratio=3, foot_ratio=0.5,
df_shapefile=NULL) {
# Name of the document
@@ -234,7 +235,7 @@ datasheet_layout = function (df_data, df_meta, layout_matrix,
# If datasheets needs to be plot
if ('datasheet' %in% toplot) {
datasheet_panel(list_df2plot, df_meta, trend_period, info_header=info_header, time_header=time_header, layout_matrix=layout_matrix, info_ratio=info_ratio, time_ratio=time_ratio, var_ratio=var_ratio, outdirTmp=outdirTmp)
datasheet_panel(list_df2plot, df_meta, trend_period, info_header=info_header, time_header=time_header, foot_note=foot_note, layout_matrix=layout_matrix, info_ratio=info_ratio, time_ratio=time_ratio, var_ratio=var_ratio, foot_ratio=foot_ratio, outdirTmp=outdirTmp)
}
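The footer panel introduced here gets its own relative height. As a rough sketch of how the ratio arguments could translate into stacked panel heights, assuming they are simply normalised top to bottom (an assumption about the layout logic, not code from this repository):

ratios = c(info=1, time=2, var=3, foot=0.5)   # new default ratios
heights = ratios / sum(ratios)                # normalised panel heights
round(heights, 2)                             # info 0.15, time 0.31, var 0.46, foot 0.08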
@@ -213,7 +213,7 @@ map_panel = function (list_df2plot, df_meta, df_shapefile, idPer=1, outdirTmp=''
if (verbose) {
# Prints the name of the map
print(paste('Map for variable : ', var,
" (", round(i/nbp*100, 1), " %)",
" (", round(i/nbp*100, 0), " %)",
sep=''))
}
@@ -477,7 +477,7 @@ matrix_panel = function (list_df2plot, df_meta, trend_period, mean_period, slice
' for region : ', fL,
" (",
round((ifL + nfL*(itype-1)) / (nfL*2) * 100,
1),
0),
" %)",
sep=''))
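Both progress messages now report whole percentages:

round(7/13*100, 1)   # 53.8  (previous behaviour)
round(7/13*100, 0)   # 54    (new behaviour)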
@@ -291,196 +291,170 @@ get_VCN10trend = function (df_data, df_meta, period, p_thresold, sampleSpan) {
return (res_VCN10trend)
}
### 1.4. tINI date
which_underfirst = function (L, UpLim) {
### 1.4. DEB date
which_underfirst = function (L, UpLim, select_longest=TRUE) {
ID = which(L <= UpLim)
dID = diff(ID)
dID = c(10, dID)
IDjump = which(dID != 1)
Njump = length(IDjump)
Periods = vector(mode='list', length=Njump)
Nperiod = c()
for (i in 1:Njump) {
if (select_longest) {
dID = diff(ID)
dID = c(10, dID)
idStart = IDjump[i]
IDjump = which(dID != 1)
Njump = length(IDjump)
if (i < Njump) {
idEnd = IDjump[i+1] - 1
} else {
idEnd = length(ID)
Periods = vector(mode='list', length=Njump)
Nperiod = c()
for (i in 1:Njump) {
idStart = IDjump[i]
if (i < Njump) {
idEnd = IDjump[i+1] - 1
} else {
idEnd = length(ID)
}
period = ID[idStart:idEnd]
Periods[[i]] = period
Nperiod = c(Nperiod, length(period))
}
period = ID[idStart:idEnd]
Periods[[i]] = period
Nperiod = c(Nperiod, length(period))
period_max = Periods[[which.max(Nperiod)]]
id = period_max[1]
} else {
id = ID[1]
}
period_max = Periods[[which.max(Nperiod)]]
id = period_max[1]
return (id)
}
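# Illustration (not part of this commit), with hypothetical flows L_demo:
# select_longest=TRUE returns the start index of the longest run of
# values <= UpLim, select_longest=FALSE the first index with a value <= UpLim.
L_demo = c(5, 3, 6, 2, 2, 2, 7)
which_underfirst(L_demo, UpLim=3, select_longest=FALSE)   # 2
which_underfirst(L_demo, UpLim=3, select_longest=TRUE)    # 4 (start of the 2,2,2 run)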
get_tINItrend = function (df_data, df_meta, period, p_thresold, sampleSpan) {
get_DEBtrend = function (df_data, df_meta, period, p_thresold, sampleSpan, thresold_type='VCN10', select_longest=TRUE) {
# Get all different stations code
Code = levels(factor(df_meta$code))
# Gets the number of station
nCode = length(Code)
# # Blank tibble to store the data averaged
# df_data_roll = tibble()
# # For all the code
# for (code in Code) {
# # Get the data associated to the code
# df_data_code = df_data[df_data$code == code,]
# # Perform the roll mean of the flow over 10 days
# df_data_roll_code = tibble(Date=df_data_code$Date,
# Value=rollmean(df_data_code$Value,
# 10,
# fill=NA),
# code=code)
# # Store the results
# df_data_roll = bind_rows(df_data_roll, df_data_roll_code)
# }
# Blank tibble to store the data averaged
df_data_roll = tibble()
# For all the code
for (code in Code) {
# Get the data associated to the code
df_data_code = df_data[df_data$code == code,]
# Perform the roll mean of the flow over 10 days
df_data_roll_code = tibble(Date=df_data_code$Date,
Value=rollmean(df_data_code$Value,
10,
fill=NA),
code=code)
# Store the results
df_data_roll = bind_rows(df_data_roll, df_data_roll_code)
}
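# Illustration (not part of this commit), assuming rollmean() is
# zoo::rollmean as the rollmean(x, 10, fill=NA) signature suggests:
# the centred 10-day rolling mean keeps the original series length and
# pads both ends with NA, e.g. length(zoo::rollmean(rnorm(30), 10, fill=NA))
# is 30.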
# Removes incomplete data from time series
df_data = remove_incomplete_data(df_data,
df_meta=df_meta,
yearLac_pct=1)
# Samples the data
df_data = sampling_data(df_data,
df_meta=df_meta,
sampleSpan=sampleSpan)
# Removes incomplete data from the averaged time series
df_data_roll = remove_incomplete_data(df_data_roll,
df_meta=df_meta,
yearLac_pct=1)
# Samples the data
df_data_roll = sampling_data(df_data_roll,
df_meta=df_meta,
sampleSpan=sampleSpan)
# Make sure to convert the period to a list
period = as.list(period)
# Set the max interval period as the minimal possible
Imax = 0
# Blank tibble for data to return
df_tINItrendB = tibble()
df_DEBtrendB = tibble()
# For all periods
for (per in period) {
df_tINIEx = tibble()
df_tINIlist = list(data=tibble(), info=tibble())
if (thresold_type == 'QNj') {
# Prepare the data to fit the entry of extract.Var
df_QTlist = prepare(df_data,
colnamegroup=c('code'))
} else if (thresold_type == 'VCN10') {
# Prepare the data to fit the entry of extract.Var
df_QTlist = prepare(df_data_roll,
colnamegroup=c('code'))
}
# Compute the yearly mean over the data
df_QTEx = extract.Var(data.station=df_QTlist,
funct=min,
timestep='year',
period=per,
pos.datetime=1,
na.rm=TRUE)
df_QT = summarise(group_by(df_QTEx, group1),
values=max(values, na.rm=TRUE))
# Renames the column of group of trend results
colnames(df_QT) = c('group', 'Thresold')
df_QT = full_join(df_QT, df_QTlist$info, by='group')
df_QT = df_QT[-1]
# print(df_QT)
df_DEBEx = tibble()
df_DEBlist = list(data=tibble(), info=tibble())
# For all the code
for (k in 1:nCode) {
# Gets the code
code = Code[k]
# print(code)
# Get the data associated to the code
df_data_code = df_data[df_data$code == code,]
# Perform the roll mean of the flow over 10 days
df_data_roll_code = tibble(Date=df_data_code$Date,
Value=rollmean(df_data_code$Value,
10,
fill=NA),
code=code)
per.start = df_meta$start_year[df_meta$code == code]
per.start = paste(sprintf("%02d", per.start), '-01', sep='')
# print('aa')
# Removes incomplete data from time series
df_data_code = remove_incomplete_data(df_data_code,
df_meta=NULL,
yearLac_pct=1,
yearStart=per.start,
Code=code)
# Samples the data
df_data_code = sampling_data(df_data_code,
df_meta=NULL,
sampleSpan=sampleSpan,
Code=code)
# Removes incomplete data from the averaged time series
df_data_roll_code =
remove_incomplete_data(df_data_roll_code,
df_meta=NULL,
yearLac_pct=1,
yearStart=per.start,
Code=code)
# Samples the data
df_data_roll_code = sampling_data(df_data_roll_code,
df_meta=NULL,
sampleSpan=sampleSpan,
Code=code)
# print('bb')
# Get the averaged data associated to the code
df_data_roll_code = df_data_roll[df_data_roll$code == code,]
# Prepare the data to fit the entry of extract.Var
df_QNAlist_code = prepare(df_data_code,
df_DEBlist_code = prepare(df_data_roll_code,
colnamegroup=c('code'))
# print('cc')
QT_code = df_QT$Thresold[df_QT$code == code]
# Compute the yearly mean over the data
df_QNAEx_code = extract.Var(data.station=df_QNAlist_code,
funct=min,
timestep='year',
# Compute the yearly min over the averaged data
df_DEBEx_code = extract.Var(data.station=df_DEBlist_code,
funct=which_underfirst,
period=per,
per.start=per.start,
timestep='year',
pos.datetime=1,
na.rm=TRUE)
UpLim=QT_code,
select_longest=select_longest)
# print('dd')
df_DEBEx_code$group1 = k
df_DEBlist_code$data$group = k
df_DEBlist_code$info$group = k
QNAmax = max(df_QNAEx_code$values, na.rm=TRUE)
# Converts index of the DEB to the julian date associated
df_DEBEx_code = prepare_date(df_DEBEx_code,
df_DEBlist_code)
# print('ee')
# Prepare the data to fit the entry of extract.Var
df_tINIlist_code = prepare(df_data_roll_code,
colnamegroup=c('code'))
# print('ff')
# Compute the yearly min over the averaged data
df_tINIEx_code = extract.Var(data.station=df_tINIlist_code,
funct=which_underfirst,
period=per,
per.start=per.start,
timestep='year',
pos.datetime=1,
UpLim=QNAmax)
# print('gg')
# print(df_tINIEx_code)
df_tINIEx_code$group1 = k
df_tINIlist_code$data$group = k
df_tINIlist_code$info$group = k
# Converts index of the tINI to the julian date associated
df_tINIEx_code = prepare_date(df_tINIEx_code,
df_tINIlist_code,
per.start=per.start)
# print('hh')
# print(df_tINIEx_code)
# Store the results
df_tINIEx = bind_rows(df_tINIEx, df_tINIEx_code)
df_DEBEx = bind_rows(df_DEBEx, df_DEBEx_code)
df_tINIlist$data = bind_rows(df_tINIlist$data,
df_tINIlist_code$data)
df_tINIlist$info = bind_rows(df_tINIlist$info,
df_tINIlist_code$info)
# print('ii')
df_DEBlist$data = bind_rows(df_DEBlist$data,
df_DEBlist_code$data)
df_DEBlist$info = bind_rows(df_DEBlist$info,
df_DEBlist_code$info)
}
# print('11')
# Compute the trend analysis
df_tINItrend = Estimate.stats(data.extract=df_tINIEx,
df_DEBtrend = Estimate.stats(data.extract=df_DEBEx,
level=p_thresold)
# Get the associated time interval
@@ -489,27 +463,25 @@ get_tINItrend = function (df_data, df_meta, period, p_thresold, sampleSpan) {
if (I > Imax) {
# Store it and the associated data and info
Imax = I
df_tINIlistB = df_tINIlist
df_tINIExB = df_tINIEx
df_DEBlistB = df_DEBlist
df_DEBExB = df_DEBEx
}
# print(per.start)
# Specify the period of analyse
df_tINItrend = get_period(per, df_tINItrend, df_tINIEx,
df_tINIlist)
df_DEBtrend = get_period(per, df_DEBtrend, df_DEBEx,
df_DEBlist)
# Store the trend
df_tINItrendB = bind_rows(df_tINItrendB, df_tINItrend)
df_DEBtrendB = bind_rows(df_DEBtrendB, df_DEBtrend)
}
# Clean results of trend analyse
res_tINItrend = clean(df_tINItrendB, df_tINIExB, df_tINIlistB)
return (res_tINItrend)
res_DEBtrend = clean(df_DEBtrendB, df_DEBExB, df_DEBlistB)
return (res_DEBtrend)
}
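# Illustration (not part of this commit): in get_DEBtrend() the
# threshold is the largest of the yearly minima, computed on the raw
# daily flow for thresold_type='QNj' and on its 10-day rolling mean
# for thresold_type='VCN10'; that value is the UpLim passed to
# which_underfirst(), so DEB is the start of the under-threshold
# period. Hypothetical yearly minima:
yearly_min_demo = c(2.4, 1.8, 2.1)
max(yearly_min_demo)   # 2.4, the per-station threshold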
### 1.5. tMID date
### 1.5. CEN date
# Realises the trend analysis of the date of the minimum 10 day
# average flow over the year (VCN10) hydrological variable
get_tMIDtrend = function (df_data, df_meta, period, p_thresold, sampleSpan) {
get_CENtrend = function (df_data, df_meta, period, p_thresold, sampleSpan) {
# Get all different stations code
Code = levels(factor(df_meta$code))
@@ -543,24 +515,24 @@ get_tMIDtrend = function (df_data, df_meta, period, p_thresold, sampleSpan) {
# Set the max interval period as the minimal possible
Imax = 0
# Blank tibble for data to return
df_tMIDtrendB = tibble()
df_CENtrendB = tibble()
# For all periods
for (per in period) {
# Prepare the data to fit the entry of extract.Var
df_tMIDlist = prepare(df_data_roll, colnamegroup=c('code'))
df_CENlist = prepare(df_data_roll, colnamegroup=c('code'))
# Compute the yearly min over the averaged data
df_tMIDEx = extract.Var(data.station=df_tMIDlist,
df_CENEx = extract.Var(data.station=df_CENlist,
funct=which.min,
period=per,
timestep='year',
pos.datetime=1)
# Converts index of the tMID to the julian date associated
df_tMIDEx = prepare_date(df_tMIDEx, df_tMIDlist)
# Converts index of the CEN to the julian date associated
df_CENEx = prepare_date(df_CENEx, df_CENlist)
# Compute the trend analysis
df_tMIDtrend = Estimate.stats(data.extract=df_tMIDEx,
df_CENtrend = Estimate.stats(data.extract=df_CENEx,
level=p_thresold)
# Get the associated time interval
@@ -569,19 +541,19 @@ get_tMIDtrend = function (df_data, df_meta, period, p_thresold, sampleSpan) {
if (I > Imax) {
# Store it and the associated data and info
Imax = I
df_tMIDlistB = df_tMIDlist
df_tMIDExB = df_tMIDEx
df_CENlistB = df_CENlist
df_CENExB = df_CENEx
}
# Specify the period of analyse
df_tMIDtrend = get_period(per, df_tMIDtrend, df_tMIDEx,
df_tMIDlist)
df_CENtrend = get_period(per, df_CENtrend, df_CENEx,
df_CENlist)
# Store the trend
df_tMIDtrendB = bind_rows(df_tMIDtrendB, df_tMIDtrend)
df_CENtrendB = bind_rows(df_CENtrendB, df_CENtrend)
}
# Clean results of trend analyse
res_tMIDtrend = clean(df_tMIDtrendB, df_tMIDExB, df_tMIDlistB)
return (res_tMIDtrend)
res_CENtrend = clean(df_CENtrendB, df_CENExB, df_CENlistB)
return (res_CENtrend)
}
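# Illustration (not part of this commit): CEN is located with
# which.min() on the 10-day rolling mean, i.e. the index of the
# yearly minimum flow, which prepare_date() then converts to a
# julian date.
which.min(c(5, 4, 2, 3, 6))   # 3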
@@ -247,7 +247,7 @@ reprepare = function(df_XEx, df_Xlist, colnamegroup=NULL) {
prepare_date = function(df_XEx, df_Xlist, per.start="01-01") {
df_dateStart = summarise(group_by(df_Xlist$data, group),
Date = min(Date))
Date=min(Date))
# filter(group_by(df_Xlist$data, group), Date == min(Date))
df_dateStart$Date_julian = NA
@@ -63,11 +63,11 @@ filename =
# "P0885010_HYDRO_QJM.txt",
# "O5055010_HYDRO_QJM.txt",
# "O0384010_HYDRO_QJM.txt",
# "S4214010_HYDRO_QJM.txt",
# "Q7002910_HYDRO_QJM.txt",
"O3035210_HYDRO_QJM.txt",
"O0554010_HYDRO_QJM.txt",
"O1584610_HYDRO_QJM.txt"
# "S4214010_HYDRO_QJM.txt"
"Q7002910_HYDRO_QJM.txt"
# "O3035210_HYDRO_QJM.txt",
# "O0554010_HYDRO_QJM.txt",
# "O1584610_HYDRO_QJM.txt"
)
@@ -245,17 +245,19 @@ res_VCN10trend = get_VCN10trend(df_data, df_meta,
sampleSpan=sampleSpan)
# Start date for low water trend
res_tINItrend = get_tINItrend(df_data, df_meta,
period=trend_period,
p_thresold=p_thresold,
sampleSpan=sampleSpan)
# res_tINItrend = read_listofdf(resdir, 'res_tINItrend')
res_DEBtrend = get_DEBtrend(df_data, df_meta,
period=trend_period,
p_thresold=p_thresold,
sampleSpan=sampleSpan,
thresold_type='VCN10',
select_longest=TRUE)
# res_DEBtrend = read_listofdf(resdir, 'res_DEBtrend')
# Center date for low water trend
res_tMIDtrend = get_tMIDtrend(df_data, df_meta,
period=trend_period,
p_thresold=p_thresold,
sampleSpan=sampleSpan)
res_CENtrend = get_CENtrend(df_data, df_meta,
period=trend_period,
p_thresold=p_thresold,
sampleSpan=sampleSpan)
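# For reference (not part of this commit), the daily-flow threshold
# variant of the start-date analysis would only change one argument
# (the result name below is hypothetical):
# res_DEBtrend_QNj = get_DEBtrend(df_data, df_meta,
#                                 period=trend_period,
#                                 p_thresold=p_thresold,
#                                 sampleSpan=sampleSpan,
#                                 thresold_type='QNj',
#                                 select_longest=TRUE)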
### 3.3. Break analysis
# df_break = get_break(res_QAtrend$data, df_meta)
@@ -301,20 +303,20 @@ datasheet_layout(toplot=c(
df_data=list(res_QAtrend$data,
res_QMNAtrend$data,
res_VCN10trend$data,
res_tINItrend$data,
res_tMIDtrend$data),
res_DEBtrend$data,
res_CENtrend$data),
df_trend=list(res_QAtrend$trend,
res_QMNAtrend$trend,
res_VCN10trend$trend,
res_tINItrend$trend,
res_tMIDtrend$trend),
res_DEBtrend$trend,
res_CENtrend$trend),
var=list('QA',
'QMNA',
'VCN10',
'tINI',
'tMID'),
'DEB',
'CEN'),
type=list('sévérité',
'sévérité',
@@ -329,9 +331,11 @@ datasheet_layout(toplot=c(
mean_period=mean_period,
info_header=TRUE,
time_header=df_data,
foot_note=TRUE,
info_ratio=2,
time_ratio=2,
var_ratio=3,
foot_ratio=0.5,
df_shapefile=df_shapefile,
figdir=figdir,
filename_opt='')