Commit d045d298 authored by Gaetano Raffaele

Merge remote-tracking branch 'origin/develop' into develop

# Conflicts:
#	moringa.py
Showing with 175 additions and 120 deletions
from osgeo import gdal
from osgeo import ogr
from pyproj import Transformer as T


def get_query_bbox(shp, query_srs=4326, return_all=False):
    ds = ogr.Open(shp)
    ly = ds.GetLayer(0)
    xt = ly.GetExtent()
    shp_srs = int(ly.GetSpatialRef().GetAuthorityCode(None))
    i_bbox = [xt[0], xt[2], xt[1], xt[3]]
    if shp_srs == query_srs:
        bbox = i_bbox
    else:
        tr = T.from_crs(shp_srs, query_srs, always_xy=True)
        lon_min, lat_min = tr.transform(xt[0], xt[2])
        lon_max, lat_max = tr.transform(xt[1], xt[3])
        bbox = [lon_min, lat_min, lon_max, lat_max]
    if return_all:
        return bbox, i_bbox, shp_srs
    else:
        return bbox
\ No newline at end of file
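A minimal usage sketch of the helper above (the shapefile path is hypothetical; later files in this commit import it as Common.geotools.get_query_bbox):

from Common.geotools import get_query_bbox

# WGS84 (EPSG:4326) bounding box of a vector ROI, ready for a STAC or EODAG query
bbox = get_query_bbox("roi.shp")
# Also get the native-CRS extent and its EPSG code, as the fetch() helpers below do
bbox, i_bbox, shp_srs = get_query_bbox("roi.shp", return_all=True)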
@@ -9,8 +9,10 @@ from sklearn.ensemble import RandomForestClassifier
 from sklearn.metrics import confusion_matrix, accuracy_score, cohen_kappa_score, precision_recall_fscore_support
 
 class ObjectBasedClassifier:
-    def __init__(self, object_layer, reference_data, time_series_patterns, user_feature_list, ref_class_field='class'):
-        self.obia_base = OBIABase(object_layer, ref_data = reference_data, ref_class_field=ref_class_field)
+    def __init__(self, object_layer, reference_data, time_series_patterns, user_feature_list, ref_class_field='class',
+                 ref_id_field='id'):
+        self.obia_base = OBIABase(object_layer, ref_data=reference_data, ref_class_field=ref_class_field,
+                                  ref_id_field=ref_id_field)
         for ptrn in time_series_patterns:
             lst = sorted(glob.glob(ptrn))
             self.obia_base.add_raster_time_series_for_stats(lst)
...
@@ -174,11 +174,14 @@ class OBIABase:
             tile_obj = np.squeeze(clip_obj['array']).astype(np.uint32)
         else:
             assert (self.ref_obj_layer_pipe is not None)
-            self.ref_obj_layer_pipe[-1].PropagateRequestedRegion('out', r)
-            tile_obj = self.ref_obj_layer_pipe[-1].GetImageAsNumpyArray('out').astype(np.uint32)
-            #tile_obj = self.ref_obj_layer[r['index'][1]:r['index'][1]+r['size'][1],
-            #                              r['index'][0]:r['index'][0]+r['size'][0]]
+            tmp_er = otb.Registry.CreateApplication('ExtractROI')
+            tmp_er.SetParameterInputImage('in', self.ref_obj_layer_pipe[-1].GetParameterOutputImage('out'))
+            tmp_er.SetParameterInt('startx', r['index'][0])
+            tmp_er.SetParameterInt('starty', r['index'][1])
+            tmp_er.SetParameterInt('sizex', r['size'][0])
+            tmp_er.SetParameterInt('sizey', r['size'][1])
+            tmp_er.Execute()
+            tile_obj = tmp_er.GetImageAsNumpyArray('out').astype(np.uint32)
         si = otb.Registry.CreateApplication('Superimpose')
         si.SetParameterString('inm', input_image)
...
@@ -30,7 +30,7 @@ def parse_colormap_file(fn):
     return labels, class_names, colors
 
-def generate_report_figures(map, palette_fn, results, summary, out_dir, map_name=None):
+def generate_report_figures(map, palette_fn, results, summary, out_dir, map_name=None, importance_perc=0.75):
     labels, class_names, colors = parse_colormap_file(palette_fn)
     colors_norm = [(x[0]/255,x[1]/255,x[2]/255,x[3]/255) for x in colors]
     with plt.ioff():
@@ -81,7 +81,7 @@ def generate_report_figures(map, palette_fn, results, summary, out_dir, map_name
         imp_s = [x for _, x in sorted(zip(imp_m, imp_s), reverse=True)]
         imp_m = sorted(imp_m, reverse=True)
         c_imp = np.cumsum(imp_m)
-        idx = np.where(c_imp<0.75*c_imp[-1])[0][-1]
+        idx = np.where(c_imp < importance_perc * c_imp[-1])[0][-1]
         imp_m = imp_m[:idx]
         imp_s = imp_s[:idx]
         imp_n = imp_n[:idx]
...
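To illustrate the new importance_perc parameter above: the cutoff keeps the top-ranked features whose cumulative importance stays below that fraction of the total. A toy sketch with synthetic values (not project data):

import numpy as np

imp_m = [0.40, 0.25, 0.15, 0.10, 0.06, 0.04]   # hypothetical importances, sorted in descending order
c_imp = np.cumsum(imp_m)                        # [0.40, 0.65, 0.80, 0.90, 0.96, 1.00]
importance_perc = 0.75
idx = np.where(c_imp < importance_perc * c_imp[-1])[0][-1]
print(idx, imp_m[:idx])                         # 1 [0.4] -> only the top feature is kept in this toy case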
@@ -49,44 +49,46 @@ def fetch(shp, dt, out_fld, api_key):
         }
         res = session.get(API_URL, params=parameters)
         mosaic = res.json()
-        mosaic_id = mosaic['mosaics'][0]['id']
-
-        search_parameters = {
-            'bbox': str_bbox,
-            'minimal': True
-        }
-        quads_url = "{}/{}/quads".format(API_URL, mosaic_id)
-        res = session.get(quads_url, params=search_parameters, stream=True)
-        quads = res.json()
-
-        l_quads = []
-        for q in tqdm(quads['items'], desc='Downloading quads for {}'.format(md)):
-            link = q['_links']['download']
-            l_quads.append(os.path.join(out_fld, q['id'] + '.tif'))
-            if not os.path.exists(l_quads[-1]):
-                urllib.request.urlretrieve(link, l_quads[-1])
-
-        app_mos = otb.Registry.CreateApplication('Mosaic')
-        app_mos.SetParameterStringList('il', l_quads)
-        app_mos.Execute()
-        app_roi = otb.Registry.CreateApplication('ExtractROI')
-        app_roi.SetParameterInputImage('in', app_mos.GetParameterOutputImage('out'))
-        fn = 'PlanetMosaic_{}_{}.tif'.format(os.path.splitext(os.path.basename(shp))[0], md)
-        app_roi.SetParameterString('mode', 'fit')
-        app_roi.SetParameterString('mode.fit.vect', shp)
-        app_roi.SetParameterString('out', os.path.join(out_fld, fn))
-        app_roi.SetParameterOutputImagePixelType('out', otb.ImagePixelType_uint16)
-        app_roi.ExecuteAndWriteOutput()
-
-        [os.remove(f) for f in l_quads]
-
-    return
+        if len(mosaic['mosaics']) > 0:
+            mosaic_id = mosaic['mosaics'][0]['id']
+
+            search_parameters = {
+                'bbox': str_bbox,
+                'minimal': True
+            }
+            quads_url = "{}/{}/quads".format(API_URL, mosaic_id)
+            res = session.get(quads_url, params=search_parameters, stream=True)
+            quads = res.json()
+
+            l_quads = []
+            for q in tqdm(quads['items'], desc='Downloading quads for {}'.format(md)):
+                link = q['_links']['download']
+                l_quads.append(os.path.join(out_fld, q['id'] + '.tif'))
+                if not os.path.exists(l_quads[-1]):
+                    urllib.request.urlretrieve(link, l_quads[-1])
+
+            app_mos = otb.Registry.CreateApplication('Mosaic')
+            app_mos.SetParameterStringList('il', l_quads)
+            app_mos.Execute()
+            app_roi = otb.Registry.CreateApplication('ExtractROI')
+            app_roi.SetParameterInputImage('in', app_mos.GetParameterOutputImage('out'))
+            fn = 'PlanetMosaic_{}_{}.tif'.format(os.path.splitext(os.path.basename(shp))[0], md)
+            app_roi.SetParameterString('mode', 'fit')
+            app_roi.SetParameterString('mode.fit.vect', shp)
+            app_roi.SetParameterString('out', os.path.join(out_fld, fn))
+            app_roi.SetParameterOutputImagePixelType('out', otb.ImagePixelType_uint16)
+            app_roi.ExecuteAndWriteOutput()
+
+            [os.remove(f) for f in l_quads]
+
+    return PlanetMosaicPipeline(out_fld)
 
 class PlanetMosaicPipeline:
     # --- BEGIN SENSOR PROTOTYPE ---
     NAME = 'PLANET-MOSAICS'
     REF_TYPE = otb.ImagePixelType_uint16
+    FEAT_TYPE = otb.ImagePixelType_float
     PTRN_name = 'PlanetMosaic_*.tif'
     FEAT_exp = {
         'B': 'im1b1',
@@ -142,14 +144,23 @@ class PlanetMosaicPipeline:
         return
 
     def parse_folder(self, fld):
-        img_list = [os.path.abspath(x) for x in glob.glob(os.path.join(fld, self.PTRN_dir))]
+        img_list = [os.path.abspath(x) for x in glob.glob(os.path.join(fld, self.PTRN_name))]
         return sorted(img_list, key=lambda x: self._img_id(x))
 
+    def append(self, app, fname=None, ftype=None, outp=None, is_output=False):
+        if is_output:
+            self.out_idx.append(len(self.pipe))
+        self.pipe.append(app)
+        self.files.append(fname)
+        self.types.append(ftype)
+        self.out_p.append(outp)
+
     def compute_features(self, feat_list=['B', 'G', 'R', 'NIR', 'NDVI']):
         proc_idx = self.out_idx.copy()
         self.out_idx = []
         for k in range(len(proc_idx)):
             bm = otb.Registry.CreateApplication('BandMathX')
+            bm.AddImageToParameterInputImageList('il', self.pipe[k].GetParameterOutputImage(self.out_p[k]))
             expr = []
             for f in feat_list:
                 expr.append(self.FEAT_exp[f])
@@ -157,7 +168,7 @@ class PlanetMosaicPipeline:
             bm.SetParameterString('exp', expr)
             bm.Execute()
             fn = self.files[proc_idx[k]].replace('.tif', '_FEAT.tif')
-            self.append(bm, fn, self.REF_TYPE, 'out', is_output=True)
+            self.append(bm, fn, self.FEAT_TYPE, 'out', is_output=True)
 
     def write_outputs(self, fld, update_pipe=False, compress=False):
         out = []
...
@@ -7,13 +7,34 @@ import uuid
 import xml.etree.ElementTree as ET
 import shutil
 from itertools import groupby
+from Common.geotools import get_query_bbox
+from eodag import EODataAccessGateway
+
+
+def fetch(shp, dt, output_fld, credentials):
+    bbox = get_query_bbox(shp)
+    dag = EODataAccessGateway(user_conf_file_path=credentials)
+    dag.set_preferred_provider("peps")
+    search_criteria = {
+        "productType": "S1_SAR_GRD",
+        "start": dt.split("/")[0],
+        "end": dt.split("/")[1],
+        "geom": {"lonmin": bbox[0], "latmin": bbox[1], "lonmax": bbox[2], "latmax": bbox[3]}
+    }
+    res = dag.search_all(**search_criteria)
+    res.filter_property(sensorMode='IW')
+    if len(res) > 0:
+        os.makedirs(output_fld, exist_ok=True)
+        dag.download_all(res, outputs_prefix=output_fld, extract=True)
+    return S1GRDPipeline(output_fld)
+
 
 class S1GRDPipeline:
     # --- BEGIN SENSOR PROTOTYPE ---
     NAME = 'S1-IW-GRD'
     VAL_TYPE = otb.ImagePixelType_int16
     TMP_TYPE = otb.ImagePixelType_float
-    PTRN_dir = 'S1*_IW_GRD*.SAFE'
+    PTRN_dir = 'S1*_IW_GRD*/S1*_IW_GRD*.SAFE'
     PTRN_ref = '-iw-grd-'
     VH_name = 'vh'
     PTRN = ['measurement/s1*-iw-grd-vh-*.tiff', 'measurement/s1*-iw-grd-vv-*.tiff']
...
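A hedged usage sketch of the EODAG-based download added above (the ROI shapefile, output folder and credential file names are hypothetical; dt is a 'start/end' string as split by fetch):

from TimeSeries import s1base

# 'eodag.yml' is an EODAG user configuration file holding the PEPS credentials
pipeline = s1base.fetch("roi.shp", "2022-01-01/2022-03-31", "/data/s1_grd", "eodag.yml")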
 import os.path
 from TimeSeries.s1base import *
 import planetary_computer as PC
 from pystac_client import Client
-from osgeo import ogr
 from pyproj import Transformer as T
 from shapely.geometry import Polygon
 import rasterio
@@ -11,24 +9,12 @@ import rasterio.mask
 import tqdm
 import otbApplication as otb
 import json
+from Common.geotools import get_query_bbox
 
 def fetch(shp, dt, output_fld, api_key, direction=None):
     os.environ['PC_SDK_SUBSCRIPTION_KEY'] = api_key
-    ds = ogr.Open(shp)
-    ly = ds.GetLayer(0)
-    xt = ly.GetExtent()
-    shp_srs = int(ly.GetSpatialRef().GetAuthorityCode(None))
-    qry_srs = 4326
-    i_bbox = [xt[0], xt[2], xt[1], xt[3]]
-    if shp_srs == qry_srs:
-        bbox = i_bbox
-    else:
-        tr = T.from_crs(shp_srs, qry_srs, always_xy=True)
-        lon_min, lat_min = tr.transform(xt[0], xt[2])
-        lon_max, lat_max = tr.transform(xt[1], xt[3])
-        bbox = [lon_min, lat_min, lon_max, lat_max]
+    bbox, i_bbox, shp_srs = get_query_bbox(shp, return_all=True)
     api = Client.open('https://planetarycomputer.microsoft.com/api/stac/v1', modifier=PC.sign_inplace)
     res = api.search(collections="sentinel-1-rtc", bbox=bbox, datetime=dt)
...
+import warnings
 from TimeSeries.s2sen2cor import *
 import planetary_computer as PC
 from pystac_client import Client
 import rasterio
 import rasterio.mask
-from osgeo import ogr
+from Common.geotools import get_query_bbox
 from pyproj import Transformer as T
 from shapely.geometry import Polygon
 import tqdm
 
 def fetch(shp, dt, output_fld, band_list=None):
-    ds = ogr.Open(shp)
-    ly = ds.GetLayer(0)
-    xt = ly.GetExtent()
-    shp_srs = int(ly.GetSpatialRef().GetAuthorityCode(None))
-    qry_srs = 4326
-    i_bbox = [xt[0], xt[2], xt[1], xt[3]]
-    if shp_srs == qry_srs:
-        bbox = i_bbox
-    else:
-        tr = T.from_crs(shp_srs,qry_srs,always_xy=True)
-        lon_min, lat_min = tr.transform(xt[0], xt[2])
-        lon_max, lat_max = tr.transform(xt[1], xt[3])
-        bbox = [lon_min, lat_min, lon_max, lat_max]
+    bbox, i_bbox, shp_srs = get_query_bbox(shp, return_all=True)
     api = Client.open('https://planetarycomputer.microsoft.com/api/stac/v1', modifier=PC.sign_inplace)
     res = api.search(collections="sentinel-2-l2a", bbox=bbox, datetime=dt)
...
+import sys
 import warnings
-from osgeo import gdal,ogr
+from osgeo import gdal
 import otbApplication as otb
 from theia_picker import TheiaCatalog
-from pyproj import Transformer as T
-import tqdm
 from Common.otb_numpy_proc import to_otb_pipeline
 import numpy as np
@@ -17,25 +13,11 @@ from osgeo import osr
 import datetime
 import uuid
 import shutil
+from Common.geotools import get_query_bbox
 from Common.geometry import get_displacements_to_ref, get_clearest_central_image
 
 def fetch(shp, dt, output_fld, credentials):
-    ds = ogr.Open(shp)
-    ly = ds.GetLayer(0)
-    xt = ly.GetExtent()
-    shp_srs = int(ly.GetSpatialRef().GetAuthorityCode(None))
-    qry_srs = 4326
-    i_bbox = [xt[0], xt[2], xt[1], xt[3]]
-    if shp_srs == qry_srs:
-        bbox = i_bbox
-    else:
-        tr = T.from_crs(shp_srs,qry_srs,always_xy=True)
-        lon_min, lat_min = tr.transform(xt[0], xt[2])
-        lon_max, lat_max = tr.transform(xt[1], xt[3])
-        bbox = [lon_min, lat_min, lon_max, lat_max]
+    bbox = get_query_bbox(shp)
 
     theia = TheiaCatalog(credentials)
     features = theia.search(
@@ -48,11 +30,8 @@ def fetch(shp, dt, output_fld, credentials):
     lst = ['FRE_B2','FRE_B3','FRE_B4','FRE_B5','FRE_B6','FRE_B7','FRE_B8','FRE_B8A',
            'FRE_B11','FRE_B12','EDG_R1','SAT_R1','CLM_R1']
-    #prg = tqdm.tqdm(total=len(features) * 13, desc="Fetching from Theia")
     for f in features:
         f.download_files(matching=lst, download_dir=output_fld)
-        #prg.update()
-    #prg.close()
 
     return S2TheiaPipeline(output_fld)
...
@@ -115,7 +115,8 @@ RUN pip install gdal==3.4.2 \
     planetary_computer \
     theia_picker \
     fpdf2 \
-    planet
+    planet \
+    eodag
 
 RUN mkdir moringav2
 COPY --chown=ubuntu . /home/ubuntu/moringav2
\ No newline at end of file
@@ -3,15 +3,18 @@ import sys
 import argparse
 import OBIA.segmentation
 import VHR.vhrbase
-from TimeSeries import s2theia, s2planetary
+from TimeSeries import s2theia, s2planetary, s1base, s1planetary
 
 def run_segmentation(img, threshold, cw, sw, out_seg,
                      n_first_iter, margin, roi, n_proc, memory,
-                     remove_graph, force_parallel):
+                     remove_graph, force_parallel, light):
     if not os.path.exists(os.path.dirname(out_seg)):
         os.makedirs(os.path.dirname(out_seg))
     params = OBIA.segmentation.LSGRMParams(threshold, cw, sw, n_first_iter, margin)
-    OBIA.segmentation.lsgrm(img, params, out_seg, n_proc, memory, roi, remove_graph, force_parallel)
+    if light:
+        OBIA.segmentation.lsgrm_light(img, params, out_seg, n_proc, memory, roi, force_parallel)
+    else:
+        OBIA.segmentation.lsgrm(img, params, out_seg, n_proc, memory, roi, remove_graph, force_parallel)
     return
 
 def preprocess_spot67(in_fld, out_fld, dem_fld, geoid_file, skip_ps, compress,
@@ -30,8 +33,7 @@ def preprocess_spot67(in_fld, out_fld, dem_fld, geoid_file, skip_ps, compress,
     return
 
 def preprocess_s2(in_fld, out_fld, output_dates_file=None, roi=None,
-                  align=False, align_to=None, align_to_band=3, align_using_band=3,
-                  provider='theia'):
+                  align_to=None, align_to_band=3, align_using_band=3, provider='theia'):
     S2Processor = None
     if provider == 'theia':
         S2Processor = s2theia.S2TheiaPipeline
@@ -48,26 +50,50 @@ def preprocess_s2(in_fld, out_fld, output_dates_file=None, roi=None,
     else:
         raise ValueError("Please provide path to a text file containing output dates.")
 
-    align_flag = align is not None
-    s2.extract_feature_set(out_fld, store_gapfill=True, mosaicking='vrt', align=align_flag, align_to=align_to,
-                           align_to_band=align_to_band, align_using_band=align_using_band, output_aligned=align)
+    align = align_to is not None
+    if (align_to == 'self'):
+        align_to = None
+    s2.extract_feature_set(out_fld, store_gapfill=True, mosaicking='vrt', align=align,
+                           align_to=align_to, align_to_band=align_to_band, align_using_band=align_using_band)
     return
 
+def preprocess_s1(in_fld, roi, out_fld, dem_fld=None, geoid=None, direction=None, satellite=None,
+                  skip_despeckle=False, provider='native'):
+    S1Processor = None
+    if provider == 'native':
+        S1Processor = s1base.S1GRDPipeline
+    elif provider == 'planetary':
+        S1Processor = s1planetary.S1RTCPipeline
+    else:
+        raise ValueError("Unsupported/non-valid provider")
+    s1 = S1Processor(in_fld, roi, out_fld, direction=direction, satellite=satellite)
+    if provider == 'native':
+        assert(os.path.exists(dem_fld) and os.path.exists(geoid))
+        s1.calibrate()
+        s1.orthorectify(dem_fld, geoid)
+        s1.stitch()
+    if not skip_despeckle:
+        s1.multitemp_speckle_filter()
+    s1.compute_features()
+    s1.write_outputs(out_fld)
+    return
+
 def main(args):
     parser = argparse.ArgumentParser(prog="moringa", add_help=False)
     subpar = parser.add_subparsers(dest="cmd")
-    prepr = subpar.add_parser("preprocess_s2", help="Performs Moringa preset time series preprocessing.",
+    prepr = subpar.add_parser("preprocess_s2", help="Performs Moringa preset time series preprocessing for Sentinel-2.",
                               formatter_class=argparse.ArgumentDefaultsHelpFormatter)
     prepr.add_argument("in_folder", type=str, help="Path to the folder containing (de-zipped) S2 images.")
     prepr.add_argument("out_folder", type=str, help="Path to the folder in which preprocessed stacks will be stored.")
     prepr.add_argument("--output_dates_file", type=str, default=None, help="Path to the text file containing output dates for temporal interpolation.")
     prepr.add_argument("--roi", type=str, default=None, help="Path to the ROI vector file.")
-    prepr.add_argument("--align", type=str, default=None, help="To perform within-series image alignment, set this as output path for the aligned series.")
-    prepr.add_argument("--align_to", type=str, default=None, help="Path to a (optional)) reference image to which the stacks must be aligned.")
-    prepr.add_argument("--align_to_band", type=int, default=1, help="Band of reference image used for alignment.")
-    prepr.add_argument("--align_using_band", type=int, default=3, help="Band of current stack used for alignment.")
-    prepr.add_argument("--provider", type=str, default='theia', help="S2 image provider. Supported: 'theia', 'theial3a', 'sen2cor', 'planetary'")
+    prepr.add_argument("--align_to", type=str, default=None, help="Optional strategy for S2 spatial alignment (self/<date>/path to reference image).")
+    prepr.add_argument("--align_to_band", type=int, default=3, help="Band of reference image used for co-registration.")
+    prepr.add_argument("--align_using_band", type=int, default=3, help="Band of current stack used for co-registration.")
+    prepr.add_argument("--provider", type=str, default='theia', help="S2 image provider. Currently supported: 'theia', 'planetary'")
 
     segmt = subpar.add_parser("segment", help="Performs (large scale Baatz-Shape) segmentation of an input image.",
                               formatter_class=argparse.ArgumentDefaultsHelpFormatter)
@@ -76,11 +102,12 @@ def main(args):
     segmt.add_argument("outimg", type=str, help="Path to the output segmentation file (.tif, .shp, .gpkg, .gml).")
     segmt.add_argument("--cw", type=float, default=0.5, help="Color weight in Baatz-Shape criterion.")
     segmt.add_argument("--sw", type=float, default=0.5, help="Spatial weight in Baatz-Shape criterion.")
-    segmt.add_argument("--n_first_iter", type=int, default=12, help="Number of iterations for parallel tile processing.")
+    segmt.add_argument("--n_first_iter", type=int, default=12, help="Number of iterations for parallel tile processing (not used in light mode).")
     segmt.add_argument("--tile_margin", type=int, default=100, help="Margin for tile overlap.")
     segmt.add_argument("--roi", type=str, default=None, help="Vector file containing an ROI.")
     segmt.add_argument("--n_proc", type=int, help="Number of cores to use.")
     segmt.add_argument("--mem_limit", type=int, help="Memory limit in MB.")
+    segmt.add_argument("--use_light_alg", help="Use the sub-optimal version of the algorithm. Faster but may produce artefacts.", action='store_true')
     segmt.add_argument("--keep_graph", help="Keep the graph files (.bin) after segmentation.", action='store_true')
     segmt.add_argument("--force_parallel", help="Force the parallelization of the process even if the full graph fits in memory.", action='store_true')
@@ -97,6 +124,19 @@ def main(args):
     vhrprep.add_argument("--skip_ps", help="Skip pansharpening step.", action='store_true')
     vhrprep.add_argument("--compress", help="Use lossless compression on outputs.", action='store_true')
 
+    s1prepr = subpar.add_parser("preprocess_s1", help="Performs Moringa preset time series preprocessing for Sentinel-1.",
+                                formatter_class=argparse.ArgumentDefaultsHelpFormatter)
+    s1prepr.add_argument("in_folder", type=str, help="Path to the folder containing (de-zipped) S1 images.")
+    s1prepr.add_argument("roi", type=str, default=None, help="Path to an image whose geometry the output must fit (e.g. an S2 scene).")
+    s1prepr.add_argument("out_folder", type=str, help="Path to the folder in which preprocessed stacks will be stored.")
+    s1prepr.add_argument("--dem_fld", type=str, default=None, help="Path to the folder containing the DEM covering the scene in WGS84 projection.")
+    s1prepr.add_argument("--geoid", type=str, default=None, help="Path to the geoid file.")
+    s1prepr.add_argument("--direction", type=str, default=None, help="Filter by orbit direction (ascending/descending).")
+    s1prepr.add_argument("--satellite", type=str, default=None, help="Filter by satellite (s1a/s1b).")
+    s1prepr.add_argument("--skip_despeckle", help="Skip the despeckling step.", action='store_true')
+    s1prepr.add_argument("--provider", type=str, default='native',
+                         help="S1 image provider. Currently supported: 'native' (e.g. esa/peps), 'planetary'")
+
     if len(args) == 1:
         parser.print_help()
         sys.exit(0)
@@ -105,17 +145,21 @@ def main(args):
     if arg.cmd == "preprocess_s2":
         preprocess_s2(arg.in_folder, arg.out_folder, output_dates_file=arg.output_dates_file, roi=arg.roi,
-                      align=arg.align, align_to=arg.align_to, align_to_band=arg.align_to_band,
-                      align_using_band=arg.align_using_band, provider=arg.provider)
+                      align_to=arg.align_to, align_to_band=arg.align_to_band, align_using_band=arg.align_using_band,
+                      provider=arg.provider)
 
     if arg.cmd == "segment":
         run_segmentation(arg.img, arg.threshold, arg.cw, arg.sw, arg.outimg, arg.n_first_iter, arg.tile_margin,
-                         arg.roi, arg.n_proc, arg.mem_limit, not arg.keep_graph, arg.force_parallel)
+                         arg.roi, arg.n_proc, arg.mem_limit, not arg.keep_graph, arg.force_parallel, arg.use_light_alg)
 
     if arg.cmd == "preprocess_spot67":
         preprocess_spot67(arg.fld, arg.out_fld, arg.dem_fld, arg.geoid, arg.skip_ps, arg.compress,
                           arg.clip, arg.align_to, arg.align_to_band, arg.align_using_band)
 
+    if arg.cmd == "preprocess_s1":
+        preprocess_s1(arg.in_folder, arg.roi, arg.out_folder, arg.dem_fld, arg.geoid, arg.direction, arg.satellite,
+                      arg.skip_despeckle, arg.provider)
+
     return 0
 
 if __name__ == "__main__":
...
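For orientation, a sketch of the updated Python entry points (assuming moringa.py is importable; all paths are hypothetical). The same operations are exposed on the command line through the preprocess_s2 --align_to option and the new preprocess_s1 sub-command:

from moringa import preprocess_s2, preprocess_s1

# Sentinel-2: align the stacks to themselves ('self'), to a date, or to a reference image path
preprocess_s2("/data/s2_theia", "/data/s2_out", output_dates_file="dates.txt",
              roi="roi.shp", align_to="self", provider="theia")

# Sentinel-1 GRD with the 'native' provider: DEM folder and geoid are needed for orthorectification
preprocess_s1("/data/s1_grd", "/data/s2_out/reference.tif", "/data/s1_out",
              dem_fld="/data/dem", geoid="/data/egm96.grd", provider="native")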