Commit 08725034 authored by Commandre Benjamin

Initialisation

Showing 1189 additions and 0 deletions
Roseliere.py 0 → 100644
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Copyright 2016 Sylvio Laventure (IRSTEA - UMR TETIS)
#
# PHYMOBAT 3.0 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PHYMOBAT 3.0 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PHYMOBAT 3.0. If not, see <http://www.gnu.org/licenses/>.
"""
Main interface
__name__ = "Roseliere 0.1"
__license__ = "GPL"
__version__ = "0.1"
__author__ = "COMMANDRÉ Benjamin - UMR TETIS / IRSTEA"
__date__ = "Septembre 2018"
"""
import os, sys, time
from collections import defaultdict
from osgeo import ogr
import configparser
from app.Processing import Processing
import app.Constantes as Constantes
import app.Outils as Outils
class Roseliere(Processing):
"""
Main interface class. It links ``ui_PHYMOBAT_tab`` and ``Processing``.
"""
def __init__(self, parent=None):
# A single super() call is enough to initialise the Processing base class.
super().__init__()
self.logger = Outils.Log("log", "Roseliere")
self.get_variable()
def get_variable(self):
"""
Gather all the application settings, such as:
- Main folder path by line edit
- Satellite captor name by combo box
- Classification year by line edit
- Study area shapefile path by line edit
- Connection username and password by line edit
- VHRS image path by line edit
- MNT image path by line edit
- Segmentation shapefile path by line edit
- Output classification shapefile path by line edit
- Output shapefile field name by line edit and field type by combo box
In this version the values are read from config.ini; a commented sketch of the expected layout follows this method.
"""
configfile = configparser.ConfigParser()
configfile.read("config.ini")
# Main folder path by line edit.
self.path_folder_dpt = "{0}/Traitement".format(configfile["sortie"]["chemin"])
# Satellite captor name by combo box
self.captor_project = "{0}".format(configfile["satellite"]["capteur"])
# Classification year by line edit
self.classif_year = "{0}".format(configfile["emprise"]["annee_debut"])
# Study area shapefile path by line edit
self.path_area = "{0}".format(configfile["emprise"]["chemin"])
# Connexion username and password by line edit
self.user = "{0}".format(configfile["theia"]["identifiant"])
self.password = "{0}".format(configfile["theia"]["mdp"])
self.proxy = "{0}".format(configfile["theia"]["proxy"])
# Output shapefile field name by line edit and field type by combo box
self.output_file = "{0}".format(configfile["sortie"]["chemin"])
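# Usage sketch: a minimal config.ini with the sections and keys read above
# (paths and credentials below are placeholders, not real values).
#
# [emprise]
# chemin = /chemin/vers/emprise.shp
# annee_debut = 2016
#
# [sortie]
# chemin = /chemin/vers/dossier_sortie
#
# [theia]
# identifiant = utilisateur@example.com
# mdp = mot_de_passe
# proxy =
#
# [satellite]
# capteur = SENTINEL2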
def run(self):
"""
Function to launch the processing. This function takes into account:
- The ``Multi-processing`` check box, if the processing is launched with several processes.
By default, this is checked. It needs a computer with at least 12 GB of memory.
- A few settings appended with :func:`get_variable`.
- The main processing steps:
- get the number of available images to download
- download and process images from the Theia platform
- compute the optimal threshold
- compute the slope raster
- run the classification processing.
"""
# Start the process
startTime = time.time()
# # Check whether the images have already been downloaded; download them otherwise
self.i_download()
# # function to launch the image processing
# self.i_images_processing()
# # Classification processing
# self.i_classifier()
# Re-initialize variables without closing and relaunching the application
Processing.__init__(self)
# End of the process
endTime = time.time()
self.logger.info('Output written to file in {0} seconds'.format(endTime - startTime))
nb_day_processing = int(time.strftime('%d', time.gmtime(endTime - startTime))) - 1
self.logger.info("That is {0} day(s) {1}".format(nb_day_processing, time.strftime('%Hh %Mmin%S', time.gmtime(endTime - startTime))))
if __name__ == "__main__":
myapp = Roseliere()
sys.exit(myapp.run())
app/Archive.py 0 → 100644
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# This file is part of PHYMOBAT 3.0.
# Copyright 2016 Sylvio Laventure (IRSTEA - UMR TETIS)
#
# PHYMOBAT 3.0 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PHYMOBAT 3.0 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PHYMOBAT 3.0. If not, see <http://www.gnu.org/licenses/>.
import os, sys, glob, re, time, io
import math, subprocess, json
import urllib.request
import tarfile, zipfile
import requests
import configparser
from osgeo import ogr, gdal, gdal_array
import numpy as np
from uuid import uuid4
import lxml
from collections import defaultdict, UserDict
import app.Constantes as Constantes
import app.Satellites as Satellites
import app.Outils as Outils
class Archive():
"""
Class to list, download and unpack Theia image archives based on a shapefile (box).
The shapefile provides the extent of the study area.
:param captor: Name of the satellite (e.g. Landsat or SpotWorldHeritage ...).
Name used in the URL of the Theia Land website
:type captor: str
:param list_year: Processing year(s) (string for one year)
:type list_year: list of str
:param box: Path of the study area shapefile
:type box: str
:param repertory: Path of the archive folder
:type repertory: str
:param emprise: Path of the extent shapefile used to clip the images
:type emprise: str
"""
def __init__(self, captor, list_year, box, repertory, emprise):
"""
Create a new 'Archive' instance
"""
self._captor = captor
self._list_year = list_year.split(";")
self._box = box
self._repertory = repertory
self.emprise = emprise
self.list_archive = []
self.logger = Outils.Log("log", "archive")
self.server = Satellites.SATELLITE[self._captor]["server"]
self.resto = Satellites.SATELLITE[self._captor]["resto"]
self.token_type = Satellites.SATELLITE[self._captor]["token_type"]
self.url = '' # str : complete website JSON database
self.list_img = [] # List (dim 5): year, month, day, path of the multispectral images, path of the cloud images
self.single_date = [] # date list without duplication
self.fichier_sauvegarde = "{0}/sauvegarde.ini".format(self._repertory)
self.sauvegarde = configparser.ConfigParser()
self.sauvegarde.read(self.fichier_sauvegarde)
def utm_to_latlng(self, zone, easting, northing, northernHemisphere=True):
"""
Function to convert UTM coordinates to geographic coordinates
@param zone: UTM zone
@type zone: int
@param easting: UTM easting coordinate (x)
@type easting: float
@param northing: UTM northing coordinate (y)
@type northing: float
@param northernHemisphere: True if the point is in the northern hemisphere
@type northernHemisphere: boolean
:returns: tuple -- **longitude** and **latitude** in decimal degrees
Source : http://www.ibm.com/developerworks/java/library/j-coordconvert/index.html
"""
if not northernHemisphere:
northing = 10000000 - northing
a = 6378137
e = 0.081819191
e1sq = 0.006739497
k0 = 0.9996
arc = northing / k0
mu = arc / (a * (1 - math.pow(e, 2) / 4.0 - 3 * math.pow(e, 4) / 64.0 - 5 * math.pow(e, 6) / 256.0))
ei = (1 - math.pow((1 - e * e), (1 / 2.0))) / (1 + math.pow((1 - e * e), (1 / 2.0)))
ca = 3 * ei / 2 - 27 * math.pow(ei, 3) / 32.0
cb = 21 * math.pow(ei, 2) / 16 - 55 * math.pow(ei, 4) / 32
cc = 151 * math.pow(ei, 3) / 96
cd = 1097 * math.pow(ei, 4) / 512
phi1 = mu + ca * math.sin(2 * mu) + cb * math.sin(4 * mu) + cc * math.sin(6 * mu) + cd * math.sin(8 * mu)
n0 = a / math.pow((1 - math.pow((e * math.sin(phi1)), 2)), (1 / 2.0))
r0 = a * (1 - e * e) / math.pow((1 - math.pow((e * math.sin(phi1)), 2)), (3 / 2.0))
fact1 = n0 * math.tan(phi1) / r0
_a1 = 500000 - easting
dd0 = _a1 / (n0 * k0)
fact2 = dd0 * dd0 / 2
t0 = math.pow(math.tan(phi1), 2)
Q0 = e1sq * math.pow(math.cos(phi1), 2)
fact3 = (5 + 3 * t0 + 10 * Q0 - 4 * Q0 * Q0 - 9 * e1sq) * math.pow(dd0, 4) / 24
fact4 = (61 + 90 * t0 + 298 * Q0 + 45 * t0 * t0 - 252 * e1sq - 3 * Q0 * Q0) * math.pow(dd0, 6) / 720
lof1 = _a1 / (n0 * k0)
lof2 = (1 + 2 * t0 + Q0) * math.pow(dd0, 3) / 6.0
lof3 = (5 - 2 * Q0 + 28 * t0 - 3 * math.pow(Q0, 2) + 8 * e1sq + 24 * math.pow(t0, 2)) * math.pow(dd0, 5) / 120
_a2 = (lof1 - lof2 + lof3) / math.cos(phi1)
_a3 = _a2 * 180 / math.pi
latitude = 180 * (phi1 - fact1 * (fact2 + fact3 + fact4)) / math.pi
if not northernHemisphere:
latitude = -latitude
longitude = ((zone > 0) and (6 * zone - 183.0) or 3.0) - _a3
return (longitude, latitude)
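# Usage sketch (illustrative values): with easting = 500000 the point lies on the
# central meridian of the zone, so the longitude returned is exactly 6 * 31 - 183 = 3.0
# degrees; the latitude depends on the northing (here roughly 43 degrees north).
#
# lon, lat = archive.utm_to_latlng(31, 500000.0, 4800000.0, northernHemisphere=True)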
def coord_box_dd(self):
"""
Function to get the coordinates of the shapefile's area
:returns: str -- **area_coord_corner** : Area corner coordinates
--> bottom-left x, bottom-left y, top-right x, top-right y
:Example:
>>> import Archive
>>> test = Archive(captor, list_year, box, folder, repertory, proxy_enabled)
>>> coor_test = test.coord_box_dd()
>>> coor_test
'45.52, 2.25, 46.71, 3.27'
"""
# Reproject the input shapefile to UTM (EPSG:32631) with ogr2ogr on the command line;
# its extent is then converted to decimal degrees with utm_to_latlng
utm_outfile = "{0}/UTM_{1}".format(os.path.split(self._box)[0], os.path.split(self._box)[1])
if os.path.exists(utm_outfile):
os.remove(utm_outfile)
process_tocall = ['ogr2ogr', '-f', 'Esri Shapefile', '-t_srs', 'EPSG:32631', utm_outfile, self._box]
subprocess.call(process_tocall)
# To get shapefile extent
# Using the ogr module
driver = ogr.GetDriverByName('ESRI Shapefile')
# Open shapefile
data_source = driver.Open(utm_outfile, 0)
if data_source is None:
self.logger.error('Could not open file')
sys.exit(1)
shp_ogr = data_source.GetLayer()
# Extent
extent_ = shp_ogr.GetExtent() # (xMin, xMax, yMin, yMax)
## Close source data
data_source.Destroy()
# Coordinates min in decimal degrees
LL_min = self.utm_to_latlng(31, extent_[0], extent_[2], northernHemisphere=True)
# Coordinates max in decimal degrees
LL_max = self.utm_to_latlng(31, extent_[1], extent_[3], northernHemisphere=True)
# AdressUrl = 'http://spirit.cnes.fr/resto/Landsat/?format=html&lang=fr&q=2013&box=' + str(LL_Min[0]) + ',' + str(LL_Min[1]) + ',' + str(LL_Max[0]) + ',' + str(LL_Max[1])
area_coord_corner = str(LL_min[0]) + ',' + str(LL_min[1]) + ',' + str(LL_max[0]) + ',' + str(LL_max[1])
return area_coord_corner
def listing(self):
"""
Function to list the archives available on the Theia Land platform for the study area
"""
# Loop on the years
self.logger.info("Images availables")
for year in self._list_year:
first_date = year.split(',')[0]
# Trick to accept either a single year or a comma-separated date range (start,end)
try:
end_date = year.split(',')[1]
self.logger.info("=============== {0} to {1} ===============".format(first_date, end_date))
self.url = "{0}/{1}/api/collections/{2}/search.json?lang=fr&_pretty=true&completionDate={3}&box={4}&maxRecord=500&startDate={5}".format(self.server, self.resto, self._captor, end_date, self.coord_box_dd(), first_date)
except IndexError:
self.logger.info("=============== {0} ===============".format(first_date))
self.url = "{0}/{1}/api/collections/{2}/search.json?lang=fr&_pretty=true&q={3}&box={4}&maxRecord=500".format(self.server, self.resto, self._captor, year, self.coord_box_dd())
# Pagination flag:
# next_ = 1 if there is a next page
# next_ = 0 otherwise
next_ = 1
# Retrieve the download path of each image
while next_ == 1:
try :
request_headers = {"User-Agent": "Magic-browser"}
req = urllib.request.Request(self.url, headers = request_headers) # Connection to the catalogue
data = urllib.request.urlopen(req).read() # Read the JSON response
new_data = re.sub(b"null", b"'null'", data) # Quote "null" so that eval() accepts it
new_data = re.sub(b"false", b"False", new_data) # Replace "false" by Python's False
# Transform the data into a dictionary
data_Dict = defaultdict(list)
data_Dict = UserDict(eval(new_data))
# Select archives to download
for d in range(len(data_Dict['features'])):
name_archive = data_Dict['features'][d]['properties']['productIdentifier']
feature_id = data_Dict["features"][d]['id']
link_archive = data_Dict['features'][d]['properties']['services']['download']['url'].replace("\\", "")
url_archive = link_archive.replace(self.resto, "rocket/#")
archive_download = url_archive.replace("/download", "") # Path to download
out_archive = "{0}/{1}.tgz".format(self._repertory, name_archive)# Name after download
# if len(self.list_archive) == 0 : self.list_archive.append([archive_download, out_archive, feature_id])
self.list_archive.append([archive_download, out_archive, feature_id])
# Verify if there is another page (next)
for link in data_Dict['properties']['links'] :
if link["title"] is 'suivant' :
self.url = link['href'].replace("\\", "")
next_ = 1
break
else :
next_ = 0
except Exception as e:
self.logger.error("Error connexion or error variable : {0}".format(e))
sys.exit(1)
self.logger.info("{0} image(s) matched the criteria.".format(len(self.list_archive)))
return len(self.list_archive)
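# Sketch of the search URL built above for a single year with the SENTINEL2 settings
# from app/Satellites.py (the box values are illustrative; the URL is one line):
#
# https://theia.cnes.fr/atdistrib/resto2/api/collections/SENTINEL2/search.json?
#     lang=fr&_pretty=true&q=2016&box=3.40,43.26,3.55,43.33&maxRecord=500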
def download_auto(self, user, password, proxy=""):
"""
Function to automatically download image archives from the Theia Land data center.
"""
url = "{0}/services/authenticate/".format(self.server)
proxyDict = {
"http" : "{0}".format(proxy), \
"https" : "{0}".format(proxy), \
"ftp" : "{0}".format(proxy) \
}
payload = {
'ident' : "{0}".format(user),\
'pass' : "{0}".format(password)
}
reponse = requests.post(url, data=payload, proxies=proxyDict)
try:
if "text" in reponse.headers["Content-Type"] :
token = reponse.text
elif "json" in reponse.headers["Content-Type"] :
token = reponse.json()["access_token"]
else :
raise(BaseException('Format non traité.'))
except Exception as e:
self.logger.error("Error during the identification request")
sys.exit(-1)
head = {"Authorization": "Bearer {0}".format(token)}
regex = 'SENTINEL(.+?)_(.+?)-'
dico_date = dict()
for l in self.list_archive :
date = re.search(regex, l[1]).group(2)
nom_image = l[1].split("/")[-1][:-4]
if str(date) in self.sauvegarde.keys() :
if nom_image not in self.sauvegarde[str(date)].keys() :
self.sauvegarde[str(date)][nom_image] = "False"
if date in dico_date.keys() :
dico_date[date].append(l)
else :
dico_date[date] = [l]
elif self.sauvegarde[str(date)][nom_image] == "False" :
if date in dico_date.keys() :
dico_date[date].append(l)
else :
dico_date[date] = [l]
else :
self.sauvegarde[str(date)] = {}
self.sauvegarde[str(date)][nom_image] = "False"
if date in dico_date.keys() :
dico_date[date].append(l)
else :
dico_date[date] = [l]
with open(self.fichier_sauvegarde, 'w') as configfile:
self.sauvegarde.write(configfile)
for cle in sorted(dico_date):
liste_content = []
for img in dico_date[cle]:
url = "{0}/{1}/collections/{2}/{3}/download/?issuerId=theia".format(self.server, self.resto, self._captor, img[2])
reponse = requests.get(url, headers=head, proxies=proxyDict)
liste_content.append(reponse.content)
self.pourc_cloud(cle, liste_content)
del liste_content
for img in dico_date[cle] :
self.sauvegarde[str(cle)][img[1].split("/")[-1][:-4]] = "True"
with open(self.fichier_sauvegarde, 'w') as configfile:
self.sauvegarde.write(configfile)
self.logger.info("All images have been downloaded !")
def calcul_aire(self, tuiles_image):
option_warp = gdal.WarpOptions(format='Mem',resampleAlg="lanczos")
mosaic_image = gdal.Warp("", tuiles_image, options=option_warp)
rows = mosaic_image.RasterYSize # Rows number
cols = mosaic_image.RasterXSize # Columns number
data = mosaic_image.GetRasterBand(1).ReadAsArray(0, 0, cols, rows).astype(np.float16)
# Valid pixels: exclude the -10000 nodata value and NaN
mask_spec = np.logical_and(data != -10000, np.logical_not(np.isnan(data)))
area = float((np.sum(mask_spec) * mosaic_image.GetGeoTransform()[1] * abs(mosaic_image.GetGeoTransform()[-1]) )/10000)
self.logger.info("Aire = {0} ha".format(area))
return mosaic_image, area
def calcul_ennuagement(self, tuiles_nuage):
option_warp = gdal.WarpOptions(format='Mem',resampleAlg="lanczos")
mosaic_nuage = gdal.Warp("", tuiles_nuage, options=option_warp)
rows = mosaic_nuage.RasterYSize # Rows number
cols = mosaic_nuage.RasterXSize # Columns number
data = mosaic_nuage.GetRasterBand(1).ReadAsArray(0, 0, cols, rows).astype(np.float16)
# Valid pixels: exclude the -10000 nodata value and NaN
mask_spec = np.logical_and(data != -10000, np.logical_not(np.isnan(data)))
# Pixels flagged as cloud-free in the cloud mask (CLM value 0)
mask_cloud = np.isin(data, 0)
# Keep the cloud-free pixels that are also valid in the spectral mask
cloud = np.choose(mask_cloud, (False, mask_spec))
# Number of clear, valid pixels
dist = np.sum(cloud)
# Cloudiness = 1 - (clear valid pixels / valid pixels)
return mosaic_nuage, 1.0 - (float(dist)/(np.sum(mask_spec)) if np.sum(mask_spec) != 0 else 0)
def ecriture_geotiff(self, dataset, chemin):
gdal.AllRegister()
driver = gdal.GetDriverByName('GTiff')
outdata = driver.Create(chemin, dataset.RasterXSize, dataset.RasterYSize, dataset.RasterCount, gdal.GDT_Float32)
outdata.SetGeoTransform(dataset.GetGeoTransform())
outdata.SetProjection(dataset.GetProjection())
for band in range(dataset.RasterCount) :
outdata.GetRasterBand(band + 1).WriteArray(\
dataset.GetRasterBand(band + 1).ReadAsArray(0, 0, dataset.RasterXSize, dataset.RasterYSize).astype(np.float32),\
0,0)
outdata.GetRasterBand(band + 1).FlushCache()
outdata = None
def clip(self, image):
option_clip = gdal.WarpOptions(cropToCutline=True,\
cutlineDSName=self.emprise, outputType=gdal.GDT_Float32 , format='Mem')
return gdal.Warp("", image, options=option_clip)
def pourc_cloud(self, date, liste_content):
self.logger.info("Date : {0}".format(date))
extent_img = ['_SRE_B2.tif', '_SRE_B3.tif', '_SRE_B4.tif', '_SRE_B8.tif']
extent_nuage = ['_CLM_R1.tif']
tuiles_image = []
tuiles_nuage = []
options_vrt = gdal.BuildVRTOptions(separate=True)
options_translate = gdal.TranslateOptions(format="Mem")
self.logger.info("Extraction des images, nombre d'archives : {0}".format(len(liste_content)))
for content in liste_content :
tzip = zipfile.ZipFile(io.BytesIO(content))
# List of the images in the archive
zip_img = tzip.namelist()
liste_bandes = []
for extension in extent_img :
img = [f for f in zip_img if extension in f][0]
mmap_name = "/vsimem/"+uuid4().hex
gdal.FileFromMemBuffer(mmap_name, tzip.read(img))
liste_bandes.append(gdal.Open(mmap_name))
vrt = gdal.BuildVRT("", liste_bandes, options= options_vrt)
tuiles_image.append(self.clip(gdal.Translate("", vrt, options=options_translate)))
del liste_bandes
image_nuage = [f for f in zip_img if '_CLM_R1.tif' in f][0]
mmap_name = "/vsimem/"+uuid4().hex
gdal.FileFromMemBuffer(mmap_name, tzip.read(image_nuage))
tuiles_nuage.append(self.clip(gdal.Open(mmap_name)))
gdal.Unlink(mmap_name)
del liste_content
self.logger.info("Calcul de l'aire")
image, aire = self.calcul_aire(tuiles_image)
del tuiles_image
if aire > 0.0 :
self.logger.info("Calcul de l'ennuagement")
nuage, ennuagement = self.calcul_ennuagement(tuiles_nuage)
del tuiles_nuage
if ennuagement == 0.0:
self.logger.info("Sauvegarde des images")
dossier = "{0}/{1}".format(self._repertory, date)
if not os.path.exists(dossier) :
os.mkdir(dossier)
self.ecriture_geotiff(image, "{0}/SENTINEL2.tif".format(dossier))
self.ecriture_geotiff(nuage, "{0}/NUAGE.tif".format(dossier))
del nuage
del image
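# Sketch of the output produced above for a cloud-free date (the date folder name is
# an illustrative example, taken from the date parsed out of the archive names):
#
# {repertory}/
#     20180701/
#         SENTINEL2.tif   (clipped multispectral mosaic)
#         NUAGE.tif       (clipped cloud mask mosaic)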
\ No newline at end of file
app/Constantes.py 0 → 100644
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
NIVEAU_DEFAUT = logging.DEBUG
CLOUD_THRESHOLD = 0.0
EPSG_PHYMOBAT = 2154
app/Outils.py 0 → 100644
#!/bin/env python
# -*- coding: utf-8 -*-
import argparse
import logging
from logging.handlers import RotatingFileHandler
import signal
from contextlib import contextmanager
import inspect
import traceback
try:
import Constantes
except :
pass
class Fusion(argparse.Action):
def __init__(self, option_strings, dest, nargs=None, **kwargs):
super(Fusion, self).__init__(option_strings, dest, nargs, **kwargs)
def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, ' '.join(values).lower())
class Log(object):
def __init__(self, dossier, nomFichier, niveau=None):
super(Log, self).__init__()
stack = inspect.stack()
nom_classe = stack[1][0].f_locals["self"].__class__.__name__
self.__logger__ = logging.getLogger(nomFichier)
if niveau is None:
try:
niveau = Constantes.NIVEAU_DEFAUT
except :
niveau = logging.DEBUG
self.__logger__.setLevel(niveau)
format_fichier = logging.Formatter('%(asctime)s :: %(levelname)s :: %(message)s', datefmt='%d-%m-%Y %H:%M:%S')
fichierLog = RotatingFileHandler("{0}/{1}.log".format(dossier, nomFichier), 'a', 1000000, 1)
fichierLog.setLevel(niveau)
fichierLog.setFormatter(format_fichier)
self.__logger__.addHandler(fichierLog)
format_console = logging.Formatter('%(asctime)s :: {0} :: %(levelname)s :: %(message)s'.format(nom_classe), datefmt='%d-%m-%Y %H:%M:%S')
console = logging.StreamHandler()
console.setLevel(niveau)
console.setFormatter(format_console)
self.__logger__.addHandler(console)
def info(self, message):
self.__logger__.info(message)
def debug(self, message):
self.__logger__.debug(message)
def warning(self, message):
self.__logger__.warning(message)
def error(self, message):
self.__logger__.error(message)
def critical(self, message):
self.__logger__.critical(message)
def exception(self, message):
self.__logger__.exception(message)
def close(self):
for handler in self.__logger__.handlers[:] :
handler.close()
self.__logger__.removeHandler(handler)
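# Usage (sketch) :
#
# Note: the constructor reads the caller's ``self`` through ``inspect.stack()``, so
# ``Log`` must be instantiated from inside a method of a class, e.g. (hypothetical class):
#
# class MonTraitement(object):
#     def __init__(self):
#         self.logger = Log("log", "MonTraitement")
#         self.logger.info("message")
#         self.logger.close()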
class TimeoutException(Exception): pass
@contextmanager
def limitation_temporelle(secondes):
def signal_handler(signum, frame):
raise TimeoutException
signal.signal(signal.SIGALRM, signal_handler)
signal.alarm(secondes)
try:
yield
finally:
signal.alarm(0)
# Usage :
#
# try:
# with limitation_temporelle(temps_en_seconde):
# appel_fonction()
# except TimeoutException:
# pass
\ No newline at end of file
app/Processing.py 0 → 100644
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of PHYMOBAT 2.0.
# Copyright 2016 Sylvio Laventure (IRSTEA - UMR TETIS)
#
# PHYMOBAT 2.0 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PHYMOBAT 2.0 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PHYMOBAT 2.0. If not, see <http://www.gnu.org/licenses/>.
import os
import numpy as np
import configparser
from app.Toolbox import Toolbox
# Image archive class
from app.Archive import Archive
import app.Constantes as Constantes
from collections import defaultdict
from multiprocessing import Process
from multiprocessing.managers import BaseManager, DictProxy
class Processing(object):
"""
Main processing.
This workflow is broken down into 2 parts :
- Image Processing (Search, download and processing)
- Classification
**Main parameters**
@param captor_project: Satellite captor name
@type captor_project: str
@param classif_year: Classification year
@type classif_year: str
@param nb_avalaible_images: Number of available images to download
@type nb_avalaible_images: int
@param path_folder_dpt: Main folder path
@type path_folder_dpt: str
@param folder_processing: Processing folder name. By default : 'Traitement'
@type folder_processing: str
@param path_area: Study area shapefile
@type path_area: str
**Id information to download on theia platform**
@param user: Connection username
@type user: str
@param password: Connection password
@type password: str
**Output parameters**
@param output_name_moba: Output classification shapefile
@type output_name_moba: str
@param out_fieldname_carto: Output shapefile field name
@type out_fieldname_carto: list of str
@param out_fieldtype_carto: Output shapefile field type
@type out_fieldtype_carto: list of str (eval ogr pointer)
"""
def __init__(self):
# List of output raster path
self.raster_path = defaultdict(dict)
def i_download(self):
"""
Interface function to download archives from the Theia Land website. This function extracts
the number of downloadable images with :func:`Archive.Archive.listing`.
Then, it downloads (:func:`Archive.Archive.download_auto`) and unzips (:func:`Archive.Archive.decompress`) the images
into the archive folder (**folder_archive**).
"""
self.images_folder = "{0}/Images".format(self.path_folder_dpt)
if not os.path.exists(self.images_folder) :
os.makedirs(self.images_folder)
self.check_download = Archive(self.captor_project, self.classif_year, self.path_area, self.images_folder, self.path_area)
self.nb_avalaible_images = self.check_download.listing()
self.check_download.download_auto(self.user, self.password, self.proxy)
self.check_download.decompress()
def i_img_sat(self):
"""
Interface function to process satellite images:
1. Clip archive images and update the Archive class with the clipped image paths,
with :func:`Toolbox.clip_raster` in the ``Toolbox`` module.
2. Compute the cloud percentage :func:`RasterSat_by_date.RasterSat_by_date.pourc_cloud`,
select images and compute the NDVI index :func:`RasterSat_by_date.RasterSat_by_date.calcul_ndvi`.
If the cloud percentage is greater than 40%, the image is not selected and the NDVI index is not computed.
3. Compute temporal stats on the NDVI index [min, max, std, min-max], with :func:`Toolbox.calc_serie_stats`
in the ``Toolbox`` module.
4. Create the NDVI stats raster and the cloud stats raster.
>>> import RasterSat_by_date
>>> stats_test = RasterSat_by_date(class_archive, big_folder, one_date)
>>> stats_test.complete_raster(stats_test.create_raster(in_raster, stats_data, in_ds), stats_data)
"""
pass
# current_list = Toolbox()
# current_list.vect = self.path_area
# # Map projection of the image and the cloud mask
# for clip_index, clip in enumerate(self.check_download.list_img):
# current_list.image = clip[3]
# current_list.check_proj() # Check if projection is RFG93
# self.check_download.list_img[clip_index][3] = current_list.clip_raster() # Multispectral images
# current_list.image = clip[4]
# current_list.check_proj() # Check if projection is RFG93
# self.check_download.list_img[clip_index][4] = current_list.clip_raster() # Cloud images
# # Images pre-processing
# spectral_out = []
# check_L8 = RasterSat_by_date(self.check_download, self.folder_processing)
# for date in self.check_download.single_date:
# check_L8.mosaic_by_date(date)
# # Search cloud's percentage, select image and compute ndvi index
# if check_L8.pourc_cloud() < Constantes.CLOUD_THRESHOLD :
# tmp = date
# check_L8.calcul_ndvi()
# tmp.extend(check_L8.cloudiness_pourcentage)
# spectral_out.append(tmp)
# # Compute temporal stats on ndvi index [min, max, std, min-max]
# spectral_trans = np.transpose(np.array(spectral_out, dtype=object))
# del spectral_out
# # stats_name = ['Min', 'Date', 'Max', 'Std', 'MaxMin']
# stats_name = ['Min', 'Max']
# path_stat = current_list.calc_serie_stats(spectral_trans, stats_name, self.folder_processing)
# for idx, i in enumerate(stats_name):
# self.out_ndvistats_folder_tab[i] = path_stat[idx]
# check_L8.logger.close()
# current_list.logger.close()
def i_vhrs(self):
"""
Interface function to process VHRS images.
It creates two OTB texture images
(:func:`Vhrs.Vhrs`): SFS texture and Haralick texture.
"""
############################## Create texture image ##############################
# Clip orthography image
pass
# current_path_ortho = Toolbox()
# current_path_ortho.image = self.path_ortho
# current_path_ortho.vect = self.path_area
# current_path_ortho.output = "{0}/MosaiqueOrtho/Clip_{1}".format(self.folder_processing, os.path.basename(self.path_ortho))
# self.path_ortho = current_path_ortho.output
# path_ortho = current_path_ortho.clip_raster()
# texture_irc = Vhrs(path_ortho, self.mp)
# self.out_ndvistats_folder_tab['sfs'] = texture_irc.out_sfs
# self.out_ndvistats_folder_tab['haralick'] = texture_irc.out_haralick
# current_path_ortho.logger.close()
# texture_irc.logger.close()
def i_images_processing(self):
"""
Interface function to launch the processing of VHRS images (:func:`i_vhrs`)
and satellite images (:func:`i_img_sat`) in multi-processing.
"""
pass
# mgr = BaseManager()
# mgr.register('defaultdict', defaultdict, DictProxy)
# mgr.start()
# self.out_ndvistats_folder_tab = mgr.defaultdict(list)
# p_img_sat = Process(target=self.i_img_sat)
# p_vhrs = Process(target=self.i_vhrs)
# self.logger.debug("Multiprocessing : {0}".format(self.mp))
# if self.mp == Constantes.MULTIPROCESSING_DISABLE:
# p_img_sat.start()
# p_img_sat.join()
# p_vhrs.start()
# p_vhrs.join()
# else :
# p_img_sat.start()
# p_vhrs.start()
# p_img_sat.join()
# p_vhrs.join()
# ###################################################################
# self.raster_path["Min"]["PATH"] = self.out_ndvistats_folder_tab['Min']
# self.raster_path["Min"]["BAND"] = 1
# for i in range(1,7) :
# self.raster_path["SFS_{0}".format(i)]["PATH"] = self.out_ndvistats_folder_tab['sfs']
# self.raster_path["SFS_{0}".format(i)]["BAND"] = i
# for i in range(1,9) :
# self.raster_path["HARALICK_{0}".format(i)]["PATH"] = self.out_ndvistats_folder_tab['haralick']
# self.raster_path["HARALICK_{0}".format(i)]["BAND"] = i
# self.raster_path["MAX"]["PATH"] = self.out_ndvistats_folder_tab['Max']
# self.raster_path["MAX"]["BAND"] = 1
# self.logger.info("End of images processing !")
app/Satellites.py 0 → 100644
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import collections
SATELLITE = collections.defaultdict(dict)
# Info from Theia-land website or Olivier Hagolle blog
########################## SENTINEL2 ###############################
SATELLITE["SENTINEL2"]["server"] = "https://theia.cnes.fr/atdistrib"
SATELLITE["SENTINEL2"]["resto"] = "resto2"
SATELLITE["SENTINEL2"]["token_type"] = "text"
SATELLITE["SENTINEL2"]["R"] = 2
SATELLITE["SENTINEL2"]["PIR"] = 3
########################## LANDSAT #################################
SATELLITE["Landsat"]["server"] = "https://theia-landsat.cnes.fr"
SATELLITE["Landsat"]["resto"] = "resto"
SATELLITE["Landsat"]["token_type"] = "json"
SATELLITE["Landsat"]["R"] = 3
SATELLITE["Landsat"]["PIR"] = 4
app/Toolbox.py 0 → 100644
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# This file is part of PHYMOBAT 1.2.
# Copyright 2016 Sylvio Laventure (IRSTEA - UMR TETIS)
#
# PHYMOBAT 1.2 is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PHYMOBAT 1.2 is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PHYMOBAT 1.2. If not, see <http://www.gnu.org/licenses/>.
from datetime import date
import otbApplication as otb
import os, sys, subprocess, glob
import numpy as np
import json
import osgeo.gdal
import app.Constantes as Constantes
import app.Outils as Outils
class Toolbox(object):
"""
Class grouping small tools to clip a raster or compute statistics on a raster.
:param image: Input image (path)
:type image: str
:param vect: Extent shapefile (path)
:type vect: str
"""
def __init__(self):
"""
Create a new 'Toolbox' instance
"""
self.image = None
self.vect = None
self.output = None
self.logger = Outils.Log("log", "Toolbox")
def clip_raster(self, **kwargs):
"""
Function to clip a raster with a vector.
If no output is set, the clipped raster is created in the same folder as the input raster,
with the prefix 'Clip_'.
:kwargs: **rm_rast** (int) - 0 (by default) or 1. Set to 1 to overwrite an existing output raster, 0 to keep it.
:returns: str -- variable **outclip**, path of the clipped output raster.
"""
rm_rast = kwargs['rm_rast'] if kwargs.get('rm_rast') else 0
if self.output is None :
outclip = "{0}/Clip_{1}".format(os.path.dirname(self.image), os.path.basename(self.image))
else :
if not os.path.exists(os.path.dirname(self.output)) :
os.mkdir(os.path.dirname(self.output))
outclip = self.output
options = osgeo.gdal.WarpOptions(dstNodata=-10000, cropToCutline=True,\
cutlineDSName=self.vect,outputType=osgeo.gdal.GDT_Int16 , format='GTiff')
if not os.path.exists(outclip) or rm_rast == 1:
self.logger.info("Raster clip of {0}".format(os.path.basename(self.image)))
osgeo.gdal.Warp(outclip, self.image, options=options)
return outclip
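# Usage sketch (hypothetical paths):
#
# tool = Toolbox()
# tool.image = "/tmp/SENTINEL2.tif"
# tool.vect = "/tmp/emprise.shp"
# clipped = tool.clip_raster()          # -> "/tmp/Clip_SENTINEL2.tif"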
def calc_serie_stats(self, table, stats_name, output_folder):
"""
Function to compute statistics on the temporal cloud and NDVI spectral table.
NDVI stats : min and max.
@param table: Spectral data, cloud rasters and NDVI rasters
@type table: numpy.ndarray
@returns: list of str -- variable **path_stats**, paths of the temporal NDVI stats rasters.
"""
ind = ['min({0})', 'max({0})']
no_data_value = [99999, -10000]
stack_ndvi = list(table[5])
image = "im{0}b1"
otb_MathBand = otb.Registry.CreateApplication("BandMathX")
otb_MathBand.SetParameterStringList("il", stack_ndvi)
path_stats = []
for idx, i in enumerate(ind):
expression = "({0} != {1} ? {0} : im1b1)".format(\
i.format(",".join("({0}>-1000 ? {0} : {1})".format(\
image.format(j+1), no_data_value[idx]) for j in range(len(stack_ndvi)))), no_data_value[idx])
out_ndvistats_raster = "{0}/{1}.TIF".format(output_folder, stats_name[idx])
path_stats.append(out_ndvistats_raster)
otb_MathBand.SetParameterString("exp",expression)
otb_MathBand.SetParameterString("out", out_ndvistats_raster)
otb_MathBand.SetParameterOutputImagePixelType("out", otb.ImagePixelType_float)
otb_MathBand.ExecuteAndWriteOutput()
return path_stats
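# Sketch of the BandMathX expression built above for the "Min" statistic with two
# NDVI images in stack_ndvi (no-data value 99999); it is written here on two comment
# lines but is a single expression:
#
# (min((im1b1>-1000 ? im1b1 : 99999),(im2b1>-1000 ? im2b1 : 99999)) != 99999
#      ? min((im1b1>-1000 ? im1b1 : 99999),(im2b1>-1000 ? im2b1 : 99999)) : im1b1)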
def check_proj(self):
"""
Function to check whether the raster projection is RGF93.
For the moment, PHYMOBAT works with only one projection: Lambert 93 (EPSG:2154).
"""
# Projection for PHYMOBAT
proj_phymobat = 'AUTHORITY["EPSG","{0}"]'.format(Constantes.EPSG_PHYMOBAT)
info_gdal = 'gdalinfo -json {0}'.format(self.image)
info_json = json.loads(subprocess.check_output(info_gdal, shell=True))
# Check if projection is in Lambert 93
if not proj_phymobat in info_json['coordinateSystem']['wkt']:
output_proj = "{0}_L93.tif".format(self.image[:-4])
reproj = "gdalwarp -t_srs EPSG: {0} {1} {2}".format(Constantes.EPSG_PHYMOBAT, self.image, output_proj)
os.system(reproj)
# Remove the old file and rename the new file to the old file's name
os.remove(self.image)
os.rename(output_proj, self.image)
\ No newline at end of file
File added
PROJCS["WGS_1984_UTM_Zone_31N",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]]
\ No newline at end of file
PROJCS["WGS 84 / UTM zone 31N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32631"]]
File added
File added
File added
PROJCS["WGS_1984_UTM_Zone_31N",GEOGCS["GCS_WGS_1984",DATUM["D_WGS_1984",SPHEROID["WGS_1984",6378137,298.257223563]],PRIMEM["Greenwich",0],UNIT["Degree",0.017453292519943295]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["Meter",1]]
\ No newline at end of file
PROJCS["WGS 84 / UTM zone 31N",GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84",6378137,298.257223563,AUTHORITY["EPSG","7030"]],AUTHORITY["EPSG","6326"]],PRIMEM["Greenwich",0,AUTHORITY["EPSG","8901"]],UNIT["degree",0.0174532925199433,AUTHORITY["EPSG","9122"]],AUTHORITY["EPSG","4326"]],PROJECTION["Transverse_Mercator"],PARAMETER["latitude_of_origin",0],PARAMETER["central_meridian",3],PARAMETER["scale_factor",0.9996],PARAMETER["false_easting",500000],PARAMETER["false_northing",0],UNIT["metre",1,AUTHORITY["EPSG","9001"]],AXIS["Easting",EAST],AXIS["Northing",NORTH],AUTHORITY["EPSG","32631"]]
File added
File added
File added
config.ini 0 → 100644
[emprise]
chemin = /media/commandre/8ce2dd88-1425-4161-a3c1-47f566c0285a/Roseliere_test/bagnas/emprise.shp
annee_debut = 2016
[sortie]
chemin = /media/commandre/8ce2dd88-1425-4161-a3c1-47f566c0285a/Roseliere_test/bagnas
[theia]
identifiant = sylvio.laventure@gmail.com
mdp = Sylvio.L34
proxy =
[satellite]
capteur = SENTINEL2
\ No newline at end of file