Commit 328ffd89 authored by jbferet

changed function names

- changed . into _ in function names
- removed capital letters from function names where needed
- renamed Spatial.Unit to window_size (call-site sketch below)
parent 2560bc5a
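For downstream scripts, the change is limited to call sites; below is a minimal before/after sketch (file paths and the window_size value are placeholders, not taken from the package):

# before this commit
# Mask.Path <- Perform.Radiometric.Filtering("Data/image", "Data/mask", "RESULTS", NDVI.Thresh = 0.5)
# map_beta_div("Data/image", "RESULTS", Spatial.Unit = 10)
# after this commit: same arguments, snake_case names, window_size instead of Spatial.Unit
Mask.Path <- perform_radiometric_filtering("Data/image", "Data/mask", "RESULTS", NDVI.Thresh = 0.5)
map_beta_div("Data/image", "RESULTS", window_size = 10)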
......@@ -19,7 +19,7 @@
#' @importFrom snow splitRows
#' @importFrom future plan multiprocess sequential
#' @importFrom future.apply future_lapply
Apply.Continuum.Removal <- function(Spectral.Data, Spectral, nbCPU = 1) {
apply_continuum_removal <- function(Spectral.Data, Spectral, nbCPU = 1) {
if (!length(Spectral$WaterVapor) == 0) {
Spectral.Data <- Spectral.Data[, -Spectral$WaterVapor]
}
......@@ -60,7 +60,7 @@ Apply.Continuum.Removal <- function(Spectral.Data, Spectral, nbCPU = 1) {
ContinuumRemoval <- function(Minit, Spectral.Bands) {
# Filter and prepare data prior to continuum removal
CR.data <- Filter.Prior.CR(Minit, Spectral.Bands)
CR.data <- filter_prior_CR(Minit, Spectral.Bands)
Minit <- CR.data$Minit
nb.Bands <- dim(Minit)[2]
CR.data$Minit <- c()
......@@ -141,7 +141,7 @@ ContinuumRemoval <- function(Minit, Spectral.Bands) {
#
# @return updated Minit
#' @importFrom matrixStats rowSds
Filter.Prior.CR <- function(Minit, Spectral.Bands) {
filter_prior_CR <- function(Minit, Spectral.Bands) {
# number of samples to be processed
nb.Samples <- nrow(Minit)
......
......@@ -22,7 +22,7 @@
#'
#' @return ImPathShade = updated shademask file
#' @export
Perform.Radiometric.Filtering <- function(Image.Path, Mask.Path, Output.Dir, TypePCA = "SPCA", NDVI.Thresh = 0.5, Blue.Thresh = 500, NIR.Thresh = 1500, Blue = 480, Red = 700, NIR = 835) {
perform_radiometric_filtering <- function(Image.Path, Mask.Path, Output.Dir, TypePCA = "SPCA", NDVI.Thresh = 0.5, Blue.Thresh = 500, NIR.Thresh = 1500, Blue = 480, Red = 700, NIR = 835) {
# define full output directory
Output.Dir.Full <- Define.Output.Dir(Output.Dir, Image.Path, TypePCA)
# define dimensions of the image
......@@ -42,7 +42,7 @@ Perform.Radiometric.Filtering <- function(Image.Path, Mask.Path, Output.Dir, Typ
print("Update mask based on NDVI, NIR and Blue threshold")
}
Shade.Update <- paste(Output.Dir.Full, "ShadeMask_Update", sep = "")
Mask.Path <- Create.Mask.Optical(Image.Path, Mask.Path, Shade.Update, NDVI.Thresh, Blue.Thresh, NIR.Thresh, Blue, Red, NIR)
Mask.Path <- create_mask_from_threshold(Image.Path, Mask.Path, Shade.Update, NDVI.Thresh, Blue.Thresh, NIR.Thresh, Blue, Red, NIR)
return(Mask.Path)
}
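A usage sketch for the renamed filtering function; the file paths below are hypothetical and the thresholds simply restate the defaults from the signature above:

# sketch: update the shade mask from NDVI, Blue and NIR thresholds (paths are hypothetical)
Mask.Path <- perform_radiometric_filtering(
  Image.Path = "Data/S2_subset", Mask.Path = "Data/S2_subset_mask",
  Output.Dir = "RESULTS", TypePCA = "SPCA",
  NDVI.Thresh = 0.5, Blue.Thresh = 500, NIR.Thresh = 1500
)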
......@@ -60,7 +60,7 @@ Perform.Radiometric.Filtering <- function(Image.Path, Mask.Path, Output.Dir, Typ
# @param NIR.Thresh NIR threshold applied to produce a mask (select pixels with NIR refl < NIR.Thresh) refl expected between 0 and 10000
#
# @return ImPathShade path for the updated shademask produced
Create.Mask.Optical <- function(ImPath, ImPathShade, ImPathShade.Update, NDVI.Thresh, Blue.Thresh, NIR.Thresh, Blue = 480, Red = 700, NIR = 835) {
create_mask_from_threshold <- function(ImPath, ImPathShade, ImPathShade.Update, NDVI.Thresh, Blue.Thresh, NIR.Thresh, Blue = 480, Red = 700, NIR = 835) {
# define wavelengths corresponding to the spectral domains Blue, Red and NIR
Spectral.Bands <- c(Blue, Red, NIR)
ImPathHDR <- Get.HDR.Name(ImPath)
......
......@@ -49,13 +49,13 @@ Center.Reduce <- function(X, m, sig) {
# change resolution in an HDR file
#
# @param HDR information read from a header file
# @param Spatial.Unit multiplying factor for initial resolution
# @param window_size multiplying factor for initial resolution
#
# @return updated HDR information
Change.Resolution.HDR <- function(HDR, Spatial.Unit) {
Change.Resolution.HDR <- function(HDR, window_size) {
MapInfo <- strsplit(HDR$`map info`, split = ",")
MapInfo[[1]][6] <- as.numeric(MapInfo[[1]][6]) * Spatial.Unit
MapInfo[[1]][7] <- as.numeric(MapInfo[[1]][7]) * Spatial.Unit
MapInfo[[1]][6] <- as.numeric(MapInfo[[1]][6]) * window_size
MapInfo[[1]][7] <- as.numeric(MapInfo[[1]][7]) * window_size
HDR$`map info` <- paste(MapInfo[[1]], collapse = ",")
return(HDR)
}
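Fields 6 and 7 of the ENVI map info string hold the x and y pixel sizes, so the function simply multiplies both by window_size; a small sketch with made-up header values:

# hypothetical header: 2 m pixels aggregated with window_size = 10 become 20 m pixels
HDR <- list(`map info` = "UTM,1,1,500000,4500000,2,2,31,North,WGS-84")
HDR <- Change.Resolution.HDR(HDR, window_size = 10)
HDR$`map info`
# "UTM,1,1,500000,4500000,20,20,31,North,WGS-84"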
......@@ -1050,13 +1050,13 @@ Split.Image <- function(HDR, LimitSizeGb = FALSE) {
# revert resolution in an HDR file
#
# @param HDR information read from a header file
# @param Spatial.Unit multiplying factor for initial resolution
# @param window_size multiplying factor for initial resolution
#
# @return updated HDR information
Revert.Resolution.HDR <- function(HDR, Spatial.Unit) {
Revert.Resolution.HDR <- function(HDR, window_size) {
MapInfo <- strsplit(HDR$`map info`, split = ",")
MapInfo[[1]][6] <- as.numeric(MapInfo[[1]][6]) / Spatial.Unit
MapInfo[[1]][7] <- as.numeric(MapInfo[[1]][7]) / Spatial.Unit
MapInfo[[1]][6] <- as.numeric(MapInfo[[1]][6]) / window_size
MapInfo[[1]][7] <- as.numeric(MapInfo[[1]][7]) / window_size
HDR$`map info` <- paste(MapInfo[[1]], collapse = ",")
return(HDR)
}
......
......@@ -14,7 +14,7 @@
#'
#' @param Input.Image.File character. Path and name of the image to be processed.
#' @param Output.Dir character. Output directory.
#' @param Spatial.Unit numeric. Dimensions of the spatial unit.
#' @param window_size numeric. Dimensions of the spatial unit.
#' @param TypePCA character. Type of PCA (PCA, SPCA, NLPCA...).
#' @param nbclusters numeric. Number of clusters defined in k-Means.
#' @param Nb.Units.Ordin numeric. Maximum number of spatial units to be processed in NMDS.
......@@ -29,7 +29,7 @@
#' @param pcelim numeric. Minimum contribution (in \%) required for a spectral species
#'
#' @export
map_beta_div <- function(Input.Image.File, Output.Dir, Spatial.Unit,
map_beta_div <- function(Input.Image.File, Output.Dir, window_size,
TypePCA = "SPCA", nbclusters = 50,
Nb.Units.Ordin = 2000, MinSun = 0.25,
pcelim = 0.02, scaling = "PCO", FullRes = TRUE,
......@@ -39,86 +39,17 @@ map_beta_div <- function(Input.Image.File, Output.Dir, Spatial.Unit,
Output.Dir.SS <- Define.Output.SubDir(Output.Dir, Input.Image.File, TypePCA, "SpectralSpecies")
Output.Dir.BETA <- Define.Output.SubDir(Output.Dir, Input.Image.File, TypePCA, "BETA")
load(file = WS_Save)
Beta <- Compute.BetaDiversity(Output.Dir.SS, MinSun, Nb.Units.Ordin, NbIter,
Beta <- compute_beta_metrics(Output.Dir.SS, MinSun, Nb.Units.Ordin, NbIter,
nbclusters, pcelim, scaling = scaling,
nbCPU = nbCPU, MaxRAM = MaxRAM)
# Create images corresponding to Beta-diversity
print("Write beta diversity maps")
Index <- paste("BetaDiversity_BCdiss_", scaling, sep = "")
Beta.Path <- paste(Output.Dir.BETA, Index, "_", Spatial.Unit, sep = "")
Write.Image.Beta(Beta$BetaDiversity, Beta$HDR, Beta.Path, Spatial.Unit, FullRes = FullRes, LowRes = LowRes)
Beta.Path <- paste(Output.Dir.BETA, Index, "_", window_size, sep = "")
write_raster_beta(Beta$BetaDiversity, Beta$HDR, Beta.Path, window_size, FullRes = FullRes, LowRes = LowRes)
return()
}
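Assuming perform_PCA and map_spectral_species have already been run for the same image and output directory, a call sketch with hypothetical paths and values:

map_beta_div(Input.Image.File = "Data/S2_subset",  # hypothetical path
             Output.Dir = "RESULTS",
             window_size = 10,                     # hypothetical spatial unit, in pixels
             TypePCA = "SPCA", nbclusters = 50,
             Nb.Units.Ordin = 2000, MinSun = 0.25,
             scaling = "PCO", FullRes = TRUE)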
# maps beta diversity indicator based on spectral species distribution
# and saves in directories specifying the number of clusters
#
# @param Input.Image.File Path and name of the image to be processed
# @param Output.Dir output directory
# @param Spatial.Unit dimensions of the spatial unit
# @param TypePCA Type of PCA (PCA, SPCA, NLPCA...)
# @param nbclusters number of clusters defined in k-Means
# @param Nb.Units.Ordin maximum number of spatial units to be processed in NMDS
# @param MinSun minimum proportion of sunlit pixels required to consider plot
# @param pcelim minimum contribution (in %) required for a spectral species
# --> 1000 will be fast but may not capture important patterns if large area
# @param scaling
# @param FullRes
# @param LowRes
# @param nbCPU
# @param MaxRAM
# --> 4000 will be slow but may show better ability to capture landscape patterns
# @return
Map.Beta.Diversity.TestnbCluster <- function(Input.Image.File, Output.Dir, Spatial.Unit, TypePCA = "SPCA", nbclusters = 50, Nb.Units.Ordin = 2000, MinSun = 0.25, pcelim = 0.02, scaling = "PCO", FullRes = TRUE, LowRes = FALSE, nbCPU = FALSE, MaxRAM = FALSE) {
Output.Dir2 <- Define.Output.Dir(Output.Dir, Input.Image.File, TypePCA)
WS_Save <- paste(Output.Dir, "PCA_Info.RData", sep = "")
Output.Dir.SS <- Define.Output.SubDir(Output.Dir, Input.Image.File, TypePCA, paste("SpectralSpecies_", nbclusters, sep = ""))
Output.Dir.BETA <- Define.Output.SubDir(Output.Dir, Input.Image.File, TypePCA, paste("BETA_", nbclusters, sep = ""))
load(file = WS_Save)
Beta <- Compute.BetaDiversity(Output.Dir.SS, MinSun, Nb.Units.Ordin, NbIter, nbclusters, pcelim, scaling = scaling, nbCPU = nbCPU, MaxRAM = MaxRAM)
# Create images corresponding to Beta-diversity
print("Write beta diversity maps")
Index <- paste("BetaDiversity_BCdiss_", scaling, sep = "")
Beta.Path <- paste(Output.Dir.BETA, Index, "_", Spatial.Unit, sep = "")
Write.Image.Beta(Beta$BetaDiversity, Beta$HDR, Beta.Path, Spatial.Unit, FullRes = FullRes, LowRes = LowRes)
return()
}
# Gets sunlit pixels from SpectralSpecies_Distribution_Sunlit
#
# @param ImPathSunlit path for SpectralSpecies_Distribution_Sunlit
# @param MinSun minimum proportion of sunlit pixels required
#
# @return
Get.Sunlit.Pixels <- function(ImPathSunlit, MinSun) {
# Filter out spatial units showing poor illumination
Sunlit.HDR <- Get.HDR.Name(ImPathSunlit)
HDR.Sunlit <- read.ENVI.header(Sunlit.HDR)
nbpix <- as.double(HDR.Sunlit$lines) * as.double(HDR.Sunlit$samples)
fid <- file(
description = ImPathSunlit, open = "rb", blocking = TRUE,
encoding = getOption("encoding"), raw = FALSE
)
Sunlit <- readBin(fid, numeric(), n = nbpix, size = 4)
close(fid)
Sunlit <- aperm(array(Sunlit, dim = c(HDR.Sunlit$samples, HDR.Sunlit$lines)))
# define sunlit spatial units
Select.Sunlit <- which(Sunlit > MinSun)
# define where to extract each datapoint in the file
coordi <- ((Select.Sunlit - 1) %% HDR.Sunlit$lines) + 1
coordj <- floor((Select.Sunlit - 1) / HDR.Sunlit$lines) + 1
# sort based on line and col (important for optimal scan of large files)
coordTot <- cbind(coordi, coordj)
# sort samples from top to bottom in order to optimize read/write of the image
# indxTot saves the order of the data for reconstruction later
indxTot <- order(coordTot[, 1], coordTot[, 2], na.last = FALSE)
coordTotSort <- coordTot[indxTot, ]
Select.Sunlit <- Select.Sunlit[indxTot]
my_list <- list("Select.Sunlit" = Select.Sunlit, "coordTotSort" = coordTotSort)
return(my_list)
}
# computes NMDS
#
......@@ -127,7 +58,7 @@ Get.Sunlit.Pixels <- function(ImPathSunlit, MinSun) {
# @return BetaNMDS.sel
#' @importFrom future plan multiprocess sequential
#' @importFrom future.apply future_lapply
Compute.NMDS <- function(MatBCdist) {
compute_NMDS <- function(MatBCdist) {
nbiterNMDS <- 4
if (Sys.info()["sysname"] == "Windows") {
nbCoresNMDS <- 2
......@@ -171,7 +102,7 @@ Compute.NMDS <- function(MatBCdist) {
#' @importFrom snow splitRows
#' @importFrom future plan multiprocess sequential
#' @importFrom future.apply future_lapply
Ordination.to.NN <- function(Beta.Ordination.sel, SSD.Path, Sample.Sel, coordTotSort, NbIter, nbclusters, pcelim, nbCPU = FALSE) {
ordination_to_NN <- function(Beta.Ordination.sel, SSD.Path, Sample.Sel, coordTotSort, NbIter, nbclusters, pcelim, nbCPU = FALSE) {
nb.Sunlit <- dim(coordTotSort)[1]
# define number of samples to be sampled each time during parallel processing
nb.samples.per.sub <- round(1e7 / dim(Sample.Sel)[1])
......@@ -184,7 +115,7 @@ Ordination.to.NN <- function(Beta.Ordination.sel, SSD.Path, Sample.Sel, coordTot
plan(multiprocess, workers = nbCPU) ## parallelize using nbCPU workers
Schedule.Per.Thread <- ceiling(nb.sub / nbCPU)
OutPut <- future_lapply(id.sub,
FUN = Ordination.Parallel, coordTotSort = coordTotSort, SSD.Path = SSD.Path,
FUN = ordination_parallel, coordTotSort = coordTotSort, SSD.Path = SSD.Path,
Sample.Sel = Sample.Sel, Beta.Ordination.sel = Beta.Ordination.sel, Nb.Units.Ordin = Nb.Units.Ordin,
NbIter = NbIter, nbclusters = nbclusters, pcelim = pcelim, future.scheduling = Schedule.Per.Thread,
future.packages = c("vegan", "dissUtils", "R.utils", "tools", "snow", "matlab")
......@@ -208,7 +139,7 @@ Ordination.to.NN <- function(Beta.Ordination.sel, SSD.Path, Sample.Sel, coordTot
# @param pcelim
#
# @return list of mean and SD of alpha diversity metrics
Ordination.Parallel <- function(id.sub, coordTotSort, SSD.Path, Sample.Sel, Beta.Ordination.sel, Nb.Units.Ordin, NbIter, nbclusters, pcelim) {
ordination_parallel <- function(id.sub, coordTotSort, SSD.Path, Sample.Sel, Beta.Ordination.sel, Nb.Units.Ordin, NbIter, nbclusters, pcelim) {
# get Spectral species distribution
coordPix <- coordTotSort[id.sub, ]
......@@ -221,13 +152,13 @@ Ordination.Parallel <- function(id.sub, coordTotSort, SSD.Path, Sample.Sel, Beta
ub <- i * nbclusters
SSDList[[1]] <- SSD.NN[, lb:ub]
SSDList[[2]] <- Sample.Sel[, lb:ub]
MatBCtmp0 <- Compute.BCdiss(SSDList, pcelim)
MatBCtmp0 <- compute_BCdiss(SSDList, pcelim)
MatBCtmp <- MatBCtmp + MatBCtmp0
}
MatBCtmp <- MatBCtmp / NbIter
# get the knn closest neighbors from each kernel
knn <- 3
OutPut <- Compute.NN.From.Ordination(MatBCtmp, knn, Beta.Ordination.sel)
OutPut <- compute_NN_from_ordination(MatBCtmp, knn, Beta.Ordination.sel)
return(OutPut)
}
......@@ -245,12 +176,12 @@ Ordination.Parallel <- function(id.sub, coordTotSort, SSD.Path, Sample.Sel, Beta
#
# @return
#' @importFrom labdsv pco
Compute.BetaDiversity <- function(Output.Dir, MinSun, Nb.Units.Ordin, NbIter, nbclusters, pcelim, scaling = "PCO", nbCPU = FALSE, MaxRAM = FALSE) {
compute_beta_metrics <- function(Output.Dir, MinSun, Nb.Units.Ordin, NbIter, nbclusters, pcelim, scaling = "PCO", nbCPU = FALSE, MaxRAM = FALSE) {
# Define path for images to be used
SSD.Path <- paste(Output.Dir, "SpectralSpecies_Distribution", sep = "")
ImPathSunlit <- paste(Output.Dir, "SpectralSpecies_Distribution_Sunlit", sep = "")
# Get illuminated pixels based on SpectralSpecies_Distribution_Sunlit
Sunlit.Pixels <- Get.Sunlit.Pixels(ImPathSunlit, MinSun)
Sunlit.Pixels <- get_sunlit_pixels(ImPathSunlit, MinSun)
Select.Sunlit <- Sunlit.Pixels$Select.Sunlit
nb.Sunlit <- length(Select.Sunlit)
# Define spatial units processed through ordination and those processed through
......@@ -282,7 +213,7 @@ Compute.BetaDiversity <- function(Output.Dir, MinSun, Nb.Units.Ordin, NbIter, nb
ub <- i * nbclusters
SSDList[[1]] <- Sample.Sel[, lb:ub]
SSDList[[2]] <- Sample.Sel[, lb:ub]
BC.from.SSD <- Compute.BCdiss(SSDList, pcelim)
BC.from.SSD <- compute_BCdiss(SSDList, pcelim)
MatBC <- MatBC + BC.from.SSD
}
MatBC <- MatBC / NbIter
......@@ -293,7 +224,7 @@ Compute.BetaDiversity <- function(Output.Dir, MinSun, Nb.Units.Ordin, NbIter, nb
# core management seems better on linux --> 4 cores possible
MatBCdist <- as.dist(MatBC, diag = FALSE, upper = FALSE)
if (scaling == "NMDS") {
Beta.Ordination.sel <- Compute.NMDS(MatBCdist)
Beta.Ordination.sel <- compute_NMDS(MatBCdist)
} else if (scaling == "PCO") {
BetaPCO <- pco(MatBCdist, k = 3)
Beta.Ordination.sel <- BetaPCO$points
......@@ -302,7 +233,7 @@ Compute.BetaDiversity <- function(Output.Dir, MinSun, Nb.Units.Ordin, NbIter, nb
# Perform nearest neighbor on spatial units excluded from Ordination
print("BC dissimilarity between samples selected for Ordination and remaining")
coordTotSort <- Sunlit.Pixels$coordTotSort
Ordination.est <- Ordination.to.NN(Beta.Ordination.sel, SSD.Path, Sample.Sel, coordTotSort, NbIter, nbclusters, pcelim, nbCPU = nbCPU)
Ordination.est <- ordination_to_NN(Beta.Ordination.sel, SSD.Path, Sample.Sel, coordTotSort, NbIter, nbclusters, pcelim, nbCPU = nbCPU)
# Reconstruct spatialized beta diversity map from previous computation
Sunlit.HDR <- Get.HDR.Name(ImPathSunlit)
......@@ -331,7 +262,7 @@ Compute.BetaDiversity <- function(Output.Dir, MinSun, Nb.Units.Ordin, NbIter, nb
#
# @return MatBC matrix of bray curtis dissimilarity matrix
#' @importFrom dissUtils diss
Compute.BCdiss <- function(SSDList, pcelim) {
compute_BCdiss <- function(SSDList, pcelim) {
# compute the proportion of each spectral species
# Here, the proportion is computed with regard to the total number of sunlit pixels
# One may want to determine if the results are similar when the proportion is computed
......@@ -375,7 +306,7 @@ Compute.BCdiss <- function(SSDList, pcelim) {
# @param BetaDiversity0
#
# @return estimated NMDS position based on nearest neighbors from NMDS
Compute.NN.From.Ordination <- function(MatBC3, knn, BetaDiversity0) {
compute_NN_from_ordination <- function(MatBC3, knn, BetaDiversity0) {
Ordin.est <- matrix(0, ncol = 3, nrow = nrow(MatBC3))
for (i in 1:nrow(MatBC3)) {
NNtmp <- sort(MatBC3[i, ], decreasing = FALSE, index.return = TRUE)
......@@ -393,13 +324,13 @@ Compute.NN.From.Ordination <- function(MatBC3, knn, BetaDiversity0) {
# @param Image 2D matrix of image to be written
# @param HDR.SSD hdr template derived from SSD to modify
# @param ImagePath path of image file to be written
# @param Spatial.Unit spatial units used to compute diversity (in pixels)
# @param window_size spatial units used to compute diversity (in pixels)
# @param FullRes should full resolution image be written (original pixel size)
# @param LowRes should low resolution image be written (one value per spatial unit)
#
# @return
Write.Image.Beta <- function(Image, HDR.SSD, ImagePath, Spatial.Unit, FullRes = TRUE, LowRes = FALSE) {
# Write image with resolution corresponding to Spatial.Unit
write_raster_beta <- function(Image, HDR.SSD, ImagePath, window_size, FullRes = TRUE, LowRes = FALSE) {
# Write image with resolution corresponding to window_size
HDR.Beta <- HDR.SSD
HDR.Beta$bands <- 3
HDR.Beta$`data type` <- 4
......@@ -424,9 +355,9 @@ Write.Image.Beta <- function(Image, HDR.SSD, ImagePath, Spatial.Unit, FullRes =
if (FullRes == TRUE) {
# Write image with Full native resolution
HDR.Full <- HDR.Beta
HDR.Full$samples <- HDR.Beta$samples * Spatial.Unit
HDR.Full$lines <- HDR.Beta$lines * Spatial.Unit
HDR.Full <- Revert.Resolution.HDR(HDR.Full, Spatial.Unit)
HDR.Full$samples <- HDR.Beta$samples * window_size
HDR.Full$lines <- HDR.Beta$lines * window_size
HDR.Full <- Revert.Resolution.HDR(HDR.Full, window_size)
ImagePath.FullRes <- paste(ImagePath, "_Fullres", sep = "")
headerFpath <- paste(ImagePath.FullRes, ".hdr", sep = "")
write.ENVI.header(HDR.Full, headerFpath)
......@@ -435,7 +366,7 @@ Write.Image.Beta <- function(Image, HDR.SSD, ImagePath, Spatial.Unit, FullRes =
for (b in 1:3) {
for (i in 1:HDR.SSD$lines) {
for (j in 1:HDR.SSD$samples) {
Image.FullRes[((i - 1) * Spatial.Unit + 1):(i * Spatial.Unit), ((j - 1) * Spatial.Unit + 1):(j * Spatial.Unit), b] <- Image[i, j, b]
Image.FullRes[((i - 1) * window_size + 1):(i * window_size), ((j - 1) * window_size + 1):(j * window_size), b] <- Image[i, j, b]
}
}
}
......@@ -451,3 +382,74 @@ Write.Image.Beta <- function(Image, HDR.SSD, ImagePath, Spatial.Unit, FullRes =
}
return("")
}
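The triple loop above replicates each spatial-unit value over a window_size x window_size block of native pixels; for a single band this is equivalent to a kronecker product with a matrix of ones (a sketch with made-up dimensions, not package code):

# expand one band of a hypothetical 2 x 3 spatial-unit image back to native resolution
window_size <- 10
Image <- array(runif(2 * 3 * 3), dim = c(2, 3, 3))  # lines x samples x 3 bands, made-up values
Band.FullRes <- kronecker(Image[, , 1], matrix(1, window_size, window_size))
dim(Band.FullRes)  # 20 x 30: each value repeated over a window_size x window_size block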
# Gets sunlit pixels from SpectralSpecies_Distribution_Sunlit
#
# @param ImPathSunlit path for SpectralSpecies_Distribution_Sunlit
# @param MinSun minimum proportion of sunlit pixels required
#
# @return
get_sunlit_pixels <- function(ImPathSunlit, MinSun) {
# Filter out spatial units showing poor illumination
Sunlit.HDR <- Get.HDR.Name(ImPathSunlit)
HDR.Sunlit <- read.ENVI.header(Sunlit.HDR)
nbpix <- as.double(HDR.Sunlit$lines) * as.double(HDR.Sunlit$samples)
fid <- file(
description = ImPathSunlit, open = "rb", blocking = TRUE,
encoding = getOption("encoding"), raw = FALSE
)
Sunlit <- readBin(fid, numeric(), n = nbpix, size = 4)
close(fid)
Sunlit <- aperm(array(Sunlit, dim = c(HDR.Sunlit$samples, HDR.Sunlit$lines)))
# define sunlit spatial units
Select.Sunlit <- which(Sunlit > MinSun)
# define where to extract each datapoint in the file
coordi <- ((Select.Sunlit - 1) %% HDR.Sunlit$lines) + 1
coordj <- floor((Select.Sunlit - 1) / HDR.Sunlit$lines) + 1
# sort based on line and col (important for optimal scan of large files)
coordTot <- cbind(coordi, coordj)
# sort samples from top to bottom in order to optimize read/write of the image
# indxTot saves the order of the data for reconstruction later
indxTot <- order(coordTot[, 1], coordTot[, 2], na.last = FALSE)
coordTotSort <- coordTot[indxTot, ]
Select.Sunlit <- Select.Sunlit[indxTot]
my_list <- list("Select.Sunlit" = Select.Sunlit, "coordTotSort" = coordTotSort)
return(my_list)
}
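The coordinate arithmetic works because which() returns column-major linear indices on the transposed lines x samples array, so the modulo by the number of lines recovers the row; a tiny worked example with made-up dimensions:

lines <- 3
M <- matrix(seq_len(12), nrow = lines)  # hypothetical 3-line x 4-sample matrix
idx <- 7                                # column-major linear index of M[1, 3]
coordi <- ((idx - 1) %% lines) + 1      # 1 -> line (row)
coordj <- floor((idx - 1) / lines) + 1  # 3 -> sample (column)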
# # maps beta diversity indicator based on spectral species distribution
# # and saves in directories specifying the number of clusters
# #
# # @param Input.Image.File Path and name of the image to be processed
# # @param Output.Dir output directory
# # @param window_size dimensions of the spatial unit
# # @param TypePCA Type of PCA (PCA, SPCA, NLPCA...)
# # @param nbclusters number of clusters defined in k-Means
# # @param Nb.Units.Ordin maximum number of spatial units to be processed in NMDS
# # @param MinSun minimum proportion of sunlit pixels required to consider plot
# # @param pcelim minimum contribution (in %) required for a spectral species
# # --> 1000 will be fast but may not capture important patterns if large area
# # @param scaling
# # @param FullRes
# # @param LowRes
# # @param nbCPU
# # @param MaxRAM
# # --> 4000 will be slow but may show better ability to capture landscape patterns
# # @return
# Map.Beta.Diversity.TestnbCluster <- function(Input.Image.File, Output.Dir, window_size, TypePCA = "SPCA", nbclusters = 50, Nb.Units.Ordin = 2000, MinSun = 0.25, pcelim = 0.02, scaling = "PCO", FullRes = TRUE, LowRes = FALSE, nbCPU = FALSE, MaxRAM = FALSE) {
# Output.Dir2 <- Define.Output.Dir(Output.Dir, Input.Image.File, TypePCA)
# WS_Save <- paste(Output.Dir, "PCA_Info.RData", sep = "")
# Output.Dir.SS <- Define.Output.SubDir(Output.Dir, Input.Image.File, TypePCA, paste("SpectralSpecies_", nbclusters, sep = ""))
# Output.Dir.BETA <- Define.Output.SubDir(Output.Dir, Input.Image.File, TypePCA, paste("BETA_", nbclusters, sep = ""))
# load(file = WS_Save)
# Beta <- compute_beta_metrics(Output.Dir.SS, MinSun, Nb.Units.Ordin, NbIter, nbclusters, pcelim, scaling = scaling, nbCPU = nbCPU, MaxRAM = MaxRAM)
# # Create images corresponding to Beta-diversity
# print("Write beta diversity maps")
# Index <- paste("BetaDiversity_BCdiss_", scaling, sep = "")
# Beta.Path <- paste(Output.Dir.BETA, Index, "_", window_size, sep = "")
# write_raster_beta(Beta$BetaDiversity, Beta$HDR, Beta.Path, window_size, FullRes = FullRes, LowRes = LowRes)
# return()
# }
......@@ -22,7 +22,7 @@
#' @param nbclusters number of clusters defined in k-Means
#'
#' @export
Map.Spectral.Species <- function(Input.Image.File, Output.Dir, PCA.Files, TypePCA = "SPCA", nbclusters = 50, nbCPU = 1, MaxRAM = FALSE) {
map_spectral_species <- function(Input.Image.File, Output.Dir, PCA.Files, TypePCA = "SPCA", nbclusters = 50, nbCPU = 1, MaxRAM = FALSE) {
# for each image
Output.Dir2 <- Define.Output.Dir(Output.Dir, Input.Image.File, TypePCA)
......@@ -63,10 +63,10 @@ Map.Spectral.Species <- function(Input.Image.File, Output.Dir, PCA.Files, TypePC
## 2- PERFORM KMEANS FOR EACH ITERATION & DEFINE SPECTRAL SPECIES ##
print("perform k-means clustering for each subset and define centroids")
# scaling factor subPCA between 0 and 1
Kmeans.info <- Init.Kmeans(dataPCA, Pix.Per.Iter, NbIter, nbclusters, nbCPU)
Kmeans.info <- init_kmeans(dataPCA, Pix.Per.Iter, NbIter, nbclusters, nbCPU)
## 3- ASSIGN SPECTRAL SPECIES TO EACH PIXEL ##
print("apply Kmeans to the whole image and determine spectral species")
Apply.Kmeans(PCA.Files, PC.Select, ImPathShade, Kmeans.info, Spectral.Species.Path, nbCPU, MaxRAM)
apply_kmeans(PCA.Files, PC.Select, ImPathShade, Kmeans.info, Spectral.Species.Path, nbCPU, MaxRAM)
}
return()
}
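A call sketch for the renamed entry point; PCA.Files would normally come from perform_PCA, and all paths and values below are hypothetical:

PCA.Files <- "RESULTS/S2_subset/SPCA/PCA/OutputPCA_8_PCs"  # hypothetical perform_PCA output
map_spectral_species(Input.Image.File = "Data/S2_subset",  # hypothetical path
                     Output.Dir = "RESULTS",
                     PCA.Files = PCA.Files,
                     TypePCA = "SPCA", nbclusters = 50,
                     nbCPU = 4, MaxRAM = 0.5)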
......@@ -83,7 +83,7 @@ Map.Spectral.Species <- function(Input.Image.File, Output.Dir, PCA.Files, TypePC
#' @importFrom future plan multiprocess sequential
#' @importFrom future.apply future_lapply
#' @importFrom stats kmeans
Init.Kmeans <- function(dataPCA, Pix.Per.Iter, NbIter, nbclusters, nbCPU = 1) {
init_kmeans <- function(dataPCA, Pix.Per.Iter, NbIter, nbclusters, nbCPU = 1) {
m0 <- apply(dataPCA, 2, function(x) min(x))
M0 <- apply(dataPCA, 2, function(x) max(x))
d0 <- M0 - m0
......@@ -115,7 +115,7 @@ Init.Kmeans <- function(dataPCA, Pix.Per.Iter, NbIter, nbclusters, nbCPU = 1) {
# @param Spectral.Species.Path path for spectral species file to be written
#
# @return
Apply.Kmeans <- function(PCA.Path, PC.Select, ImPathShade, Kmeans.info, Spectral.Species.Path, nbCPU = 1, MaxRAM = FALSE) {
apply_kmeans <- function(PCA.Path, PC.Select, ImPathShade, Kmeans.info, Spectral.Species.Path, nbCPU = 1, MaxRAM = FALSE) {
ImPathHDR <- Get.HDR.Name(PCA.Path)
HDR.PCA <- read.ENVI.header(ImPathHDR)
PCA.Format <- ENVI.Type2Bytes(HDR.PCA)
......@@ -160,7 +160,7 @@ Apply.Kmeans <- function(PCA.Path, PC.Select, ImPathShade, Kmeans.info, Spectral
Location.RW$lenBin.Shade <- SeqRead.Shade$ReadByte.End[i] - SeqRead.Shade$ReadByte.Start[i] + 1
Location.RW$Byte.Start.SS <- 1 + (SeqRead.Shade$ReadByte.Start[i] - 1) * nbIter
Location.RW$lenBin.SS <- nbIter * (SeqRead.Shade$ReadByte.End[i] - SeqRead.Shade$ReadByte.Start[i]) + 1
Compute.Spectral.Species(PCA.Path, ImPathShade, Spectral.Species.Path, Location.RW, PC.Select, Kmeans.info, nbCPU)
compute_spectral_species(PCA.Path, ImPathShade, Spectral.Species.Path, Location.RW, PC.Select, Kmeans.info, nbCPU)
}
return()
}
......@@ -181,7 +181,7 @@ Apply.Kmeans <- function(PCA.Path, PC.Select, ImPathShade, Kmeans.info, Spectral
#' @importFrom snow splitRows
#' @importFrom future plan multiprocess sequential
#' @importFrom future.apply future_lapply
Compute.Spectral.Species <- function(PCA.Path, ImPathShade, Spectral.Species.Path, Location.RW, PC.Select, Kmeans.info, nbCPU = 1) {
compute_spectral_species <- function(PCA.Path, ImPathShade, Spectral.Species.Path, Location.RW, PC.Select, Kmeans.info, nbCPU = 1) {
# characteristics of the centroids computed during preprocessing
nbIter <- length(Kmeans.info$Centroids)
......
......@@ -23,7 +23,7 @@
#'
#' @return list of paths corresponding to resulting PCA files
#' @export
Perform.PCA.Image <- function(ImPath, ImPathShade, Output.Dir, Continuum.Removal = TRUE, TypePCA = "SPCA", FilterPCA = FALSE, Excluded.WL = FALSE, NbIter = 20, nbCPU = 1, MaxRAM = 0.25) {
perform_PCA <- function(ImPath, ImPathShade, Output.Dir, Continuum.Removal = TRUE, TypePCA = "SPCA", FilterPCA = FALSE, Excluded.WL = FALSE, NbIter = 20, nbCPU = 1, MaxRAM = 0.25) {
# define the path corresponding to image, mask and output directory
ImNames <- list()
ImNames$Input.Image <- ImPath
......@@ -34,7 +34,7 @@ Perform.PCA.Image <- function(ImPath, ImPathShade, Output.Dir, Continuum.Removal
# Extract valid data subset and check validity
print("Extract pixels from the images to perform PCA on a subset")
# define number of pixels to be extracted from the image for each iteration
Pix.Per.Iter <- Define.Pixels.Per.Iter(ImNames, NbIter = NbIter)
Pix.Per.Iter <- define_pixels_per_iter(ImNames, NbIter = NbIter)
nb.Pix.To.Sample <- NbIter * Pix.Per.Iter
ImPathHDR <- Get.HDR.Name(ImPath)
HDR <- read.ENVI.header(ImPathHDR)
......@@ -42,7 +42,7 @@ Perform.PCA.Image <- function(ImPath, ImPathShade, Output.Dir, Continuum.Removal
Subset <- Get.Random.Subset.From.Image(ImPath, HDR, ImPathShade, NbIter, Pix.Per.Iter)
# if needed, apply continuum removal
if (Continuum.Removal == TRUE) {
Subset$DataSubset <- Apply.Continuum.Removal(Subset$DataSubset, Spectral, nbCPU = nbCPU)
Subset$DataSubset <- apply_continuum_removal(Subset$DataSubset, Spectral, nbCPU = nbCPU)
}
# if the number of pixels available is lower than the initial sample size
if (Subset$nbPix2Sample < nb.Pix.To.Sample) {
......@@ -86,13 +86,13 @@ Perform.PCA.Image <- function(ImPath, ImPathShade, Output.Dir, Continuum.Removal
}
Shade.Update <- paste(Output.Dir2, "ShadeMask_Update_PCA", sep = "")
Subset.PCA <- PCA.model$dataPCA[, PCsel]
ImPathShade <- Filter.PCA(ImPath, HDR, ImPathShade, Shade.Update, Spectral, Continuum.Removal, PCA.model, PCsel, TypePCA, nbCPU = nbCPU, MaxRAM = MaxRAM)
ImPathShade <- filter_PCA(ImPath, HDR, ImPathShade, Shade.Update, Spectral, Continuum.Removal, PCA.model, PCsel, TypePCA, nbCPU = nbCPU, MaxRAM = MaxRAM)
## Compute PCA 2 based on the updated shade mask ##
# extract a random selection of pixels from image
Subset <- Get.Random.Subset.From.Image(ImPath, HDR, ImPathShade, NbIter, Pix.Per.Iter)
# if needed, apply continuum removal
if (Continuum.Removal == TRUE) {
Subset$DataSubset <- Apply.Continuum.Removal(Subset$DataSubset, Spectral, nbCPU = nbCPU)
Subset$DataSubset <- apply_continuum_removal(Subset$DataSubset, Spectral, nbCPU = nbCPU)
}
# if the number of pixels available is lower than the initial sample size
if (Subset$nbPix2Sample < nb.Pix.To.Sample) {
......@@ -127,7 +127,7 @@ Perform.PCA.Image <- function(ImPath, ImPathShade, Output.Dir, Continuum.Removal
print("Apply PCA model to the whole image")
Output.Dir.PCA <- Define.Output.SubDir(Output.Dir, ImPath, TypePCA, "PCA")
PCA.Path <- paste(Output.Dir.PCA, "OutputPCA_", Nb.PCs, "_PCs", sep = "")
Create.PCA.Image(ImPath, ImPathShade, PCA.Path, PCA.model, Spectral, Nb.PCs, Continuum.Removal, TypePCA, nbCPU, MaxRAM = MaxRAM)
write_PCA_raster(ImPath, ImPathShade, PCA.Path, PCA.model, Spectral, Nb.PCs, Continuum.Removal, TypePCA, nbCPU, MaxRAM = MaxRAM)
# save workspace for this stage
WS_Save <- paste(Output.Dir2, "PCA_Info.RData", sep = "")
save(PCA.model, Pix.Per.Iter, NbIter, ImPathShade, file = WS_Save)
......@@ -150,7 +150,7 @@ Perform.PCA.Image <- function(ImPath, ImPathShade, Output.Dir, Continuum.Removal
# @param PCsel PCs used to filter out extreme values
#
# @return Shade.Update = updated shade mask
Filter.PCA <- function(ImPath, HDR, ImPathShade, Shade.Update, Spectral, CR, PCA.model, PCsel, TypePCA, nbCPU = 1, MaxRAM = 0.25) {
filter_PCA <- function(ImPath, HDR, ImPathShade, Shade.Update, Spectral, CR, PCA.model, PCsel, TypePCA, nbCPU = 1, MaxRAM = 0.25) {
# 1- get extreme values falling outside of mean +/- 3SD for the first PCsel components
# compute mean and sd of the 5 first components of the sampled data
......@@ -228,12 +228,12 @@ Filter.PCA <- function(ImPath, HDR, ImPathShade, Shade.Update, Spectral, CR, PCA
keepShade <- which(Shade.Chunk == 1)
Image.Chunk <- Image.Chunk[keepShade, ]
# these two lines are performed in Apply.Continuum.Removal
# these two lines are performed in apply_continuum_removal
# # Eliminate water vapor
# Image.Chunk = Image.Chunk[,Spectral$Bands2Keep]
# apply Continuum removal if needed
if (CR == TRUE) {
Image.Chunk <- Apply.Continuum.Removal(Image.Chunk, Spectral, nbCPU = nbCPU)
Image.Chunk <- apply_continuum_removal(Image.Chunk, Spectral, nbCPU = nbCPU)
}
# remove constant bands if needed
if (!length(Spectral$BandsNoVar) == 0) {
......@@ -309,7 +309,7 @@ Filter.PCA <- function(ImPath, HDR, ImPathShade, Shade.Update, Spectral, CR, PCA
# @param MaxRAM max RAM when initial image is read (in Gb)
#
# @return
Create.PCA.Image <- function(ImPath, ImPathShade, PCA.Path, PCA.model, Spectral, Nb.PCs, CR, TypePCA, nbCPU = 1, MaxRAM = 0.25) {
write_PCA_raster <- function(ImPath, ImPathShade, PCA.Path, PCA.model, Spectral, Nb.PCs, CR, TypePCA, nbCPU = 1, MaxRAM = 0.25) {
ImPathHDR <- Get.HDR.Name(ImPath)
HDR <- read.ENVI.header(ImPathHDR)
ShadeHDR <- Get.HDR.Name(ImPathShade)
......@@ -372,7 +372,7 @@ Create.PCA.Image <- function(ImPath, ImPathShade, PCA.Path, PCA.model, Spectral,
# apply Continuum removal if needed
if (CR == TRUE) {
Image.Chunk <- Apply.Continuum.Removal(Image.Chunk, Spectral, nbCPU = nbCPU)
Image.Chunk <- apply_continuum_removal(Image.Chunk, Spectral, nbCPU = nbCPU)
## added June 5, 2019
if (length(Spectral$BandsNoVar) > 0) {
Image.Chunk <- Image.Chunk[, -Spectral$BandsNoVar]
......@@ -561,7 +561,7 @@ pca <- function(X, type) {
# @param NbIter number of iterations performed to average diversity indices
#
# @return Pix.Per.Iter number of pixels per iteration