Commit 6384bbd2 authored by Gaetano Raffaele

ENH: started coding the basic Moringa workflow.

parent bd8f2a08
Showing with 113 additions and 16 deletions
Learning/ObjectBased.py
@@ -9,12 +9,11 @@ from sklearn.ensemble import RandomForestClassifier
 from sklearn.metrics import confusion_matrix, accuracy_score, cohen_kappa_score, precision_recall_fscore_support
 
 class ObjectBasedClassifier:
-    def __init__(self, object_layer, reference_data, time_series_patterns, user_feature_list, ref_class_field='class',
+    def __init__(self, object_layer, reference_data, time_series_list, user_feature_list, ref_class_field='class',
                  ref_id_field='id'):
         self.obia_base = OBIABase(object_layer, ref_data=reference_data, ref_class_field=ref_class_field,
                                   ref_id_field=ref_id_field)
-        for ptrn in time_series_patterns:
-            lst = sorted(glob.glob(ptrn))
+        for lst in time_series_list:
             self.obia_base.add_raster_time_series_for_stats(lst)
         for ras in user_feature_list:
             self.obia_base.add_raster_for_stats(ras)
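The constructor change moves glob resolution out of the class: `time_series_list` now takes ready-made lists of raster paths, one list per time series, instead of glob patterns. A minimal usage sketch under that reading; all paths here are hypothetical:

    import glob

    # The caller resolves the pattern itself and passes one sorted file list per time series.
    ts_files = sorted(glob.glob('/data/S2_processed/T31PDL/*/*FEAT.tif'))
    obc = ObjectBasedClassifier('/data/segmentation/segmentation.tif',  # object layer
                                '/data/REF/ref_l2.shp',                 # reference data
                                [ts_files],                             # time_series_list
                                ['/data/THR/THR_SPOT6.tif'],            # user_feature_list
                                ref_class_field='class')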
@@ -119,17 +118,13 @@ class ObjectBasedClassifier:
 
 #TEST CODE
 def run_test(sample_folder):
+    lst1 = '{}/output/S2_processed/T31PDL/*/*FEAT.tif'.format(sample_folder)
     obc = ObjectBasedClassifier('{}/output/segmentation/segmentation.tif'.format(sample_folder),
                                 '{}/input/REF/ref_l2.shp'.format(sample_folder),
-                                ['{}/output/S2_processed/T31PDL/*/*FEAT.tif'.format(sample_folder)],
+                                [lst1],
                                 ['{}/input/THR/THR_SPOT6.tif'.format(sample_folder)],
                                 ref_class_field=['class', 'Class_L1a'])
-    '''
-    obc = ObjectBasedClassifier('/DATA/Benin/OBSYDYA_data/MORINGA/SEGMENTATION/segmentation.tif',
-                                '/DATA/Benin/OBSYDYA_data/MORINGA/reference/BD_OBSYDYA_2022_ParakouNdali_v0.2.shp',
-                                ['/DATA/Benin/OBSYDYA_data/MORINGA/basefolder/FEAT/S2_THEIA_FEAT/S2_THEIA_MOSAIC_*.tif'],
-                                glob.glob('/DATA/Benin/OBSYDYA_data/MORINGA/ext_features'))
-    '''
     obc.gen_k_folds(5, class_field='class')
     m, s, r = obc.train_RF(100, return_true_vs_pred=True)
     obc.classify(m, '{}/output/classification/firstmap_l1.tif'.format(sample_folder))
TimeSeries/s2theia.py
@@ -806,7 +806,7 @@ class S2TheiaPipeline:
             t.write_outputs(output_warped, update_pipe=True, flag_nodata=True)
             t.gapfill(self.output_dates, store_gapfill)
             stack_name = t.generate_feature_stack(feat_list)
-            out.append(t.write_outputs(out_fld))
+            out.extend(t.write_outputs(out_fld))
             t.reset()
         if len(self.tiles) > 1 and mosaicking == 'vrt':
             out_mos = []
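The append-to-extend switch avoids nesting: `write_outputs` returns a list of output paths for each tile, so `append` built a list of lists while `extend` keeps `out` flat. In isolation (the file name is hypothetical):

    out_a, out_b = [], []
    tile_outputs = ['T31PDL_FEAT.tif']  # what write_outputs() returns for one tile
    out_a.append(tile_outputs)          # old behaviour: [['T31PDL_FEAT.tif']]
    out_b.extend(tile_outputs)          # new behaviour: ['T31PDL_FEAT.tif']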
New file (basic Moringa workflow driver):

import os
import json
import pickle
from moringa import preprocess_s2, run_segmentation
from Learning.ObjectBased import ObjectBasedClassifier

def process_timeseries(oroot, d, ts_lst_pkl):
    ts_lst = []
    for ts in d['timeseries']:
        print('[MORINGA-INFO] : Preprocessing {} from {}'.format(ts['type'], ts['provider']))
        if ts['type'] == 's2':
            ots = os.path.join(oroot, 'timeseries/' + ts['type'] + ts['provider'])
            os.makedirs(ots, exist_ok=True)
            ts_lst.append(preprocess_s2(ts['path'],
                                        ots,
                                        roi=d['roi'],
                                        output_dates_file=ts['output_dates_file'],
                                        provider=ts['provider']))
        else:
            raise ValueError('TimeSeries type not yet supported.')
    with open(ts_lst_pkl, 'wb') as ts_save:
        pickle.dump(ts_lst, ts_save)
    return
def perform_segmentation(ofn, d):
    print('[MORINGA-INFO] : Performing segmentation')
    os.makedirs(os.path.dirname(ofn), exist_ok=True)
    run_segmentation(d['segmentation']['src'],
                     d['segmentation']['th'],
                     d['segmentation']['cw'],
                     d['segmentation']['sw'],
                     ofn,
                     n_first_iter=d['segmentation']['n_first_iter'],
                     margin=d['segmentation']['margin'],
                     roi=d['roi'],
                     n_proc=d['segmentation']['n_proc'],
                     light=d['segmentation']['lightmode'])
    return
def train_valid_workflow(seg, ts_lst_pkl, d, m_file):
    assert (os.path.exists(seg))
    assert (os.path.exists(ts_lst_pkl))
    print('[MORINGA-INFO] : Running Training/Validation Workflow')
    with open(ts_lst_pkl, 'rb') as ts_save:
        ts_lst = pickle.load(ts_save)
    obc = ObjectBasedClassifier(seg,
                                d['ref_db']['path'],
                                ts_lst,
                                d['userfeat'],
                                ref_class_field=d['ref_db']['fields'])
    obc.gen_k_folds(5, class_field=d['ref_db']['fields'][-1])
    for cf in d['ref_db']['fields']:
        if d['training']['classifier'] == 'rf':
            m, s, r = obc.train_RF(d['training']['parameters']['n_trees'], class_field=cf, return_true_vs_pred=True)
        m_dict = {'model': m, 'results': r, 'summary': s}
        # m_file[cf] is a file path: create its parent folder, then write the pickle into it
        os.makedirs(os.path.dirname(m_file[cf]), exist_ok=True)
        with open(m_file[cf], 'wb') as mf:
            pickle.dump(m_dict, mf)
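Each per-field model file stores the m_dict written above, so loading a trained model back is symmetric. A sketch with a hypothetical path:

    import pickle

    with open('model/model_class.pkl', 'rb') as mf:  # hypothetical: the file for field 'class'
        m_dict = pickle.load(mf)
    rf_model = m_dict['model']    # trained random forest classifier
    summary = m_dict['summary']   # validation summary returned by train_RF
    results = m_dict['results']   # true vs. predicted labels from return_true_vs_pred=True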
def basic(cfg, runlevel=1, single_step=False):
    os.environ['OTB_LOGGER_LEVEL'] = 'CRITICAL'
    with open(cfg, 'r') as f:
        d = json.load(f)
    oroot = os.path.join(d['output_path'], d['chain_name'])
    step = runlevel
    # Preprocess timeseries
    ts_lst_pkl = os.path.join(oroot, 'time_series_list.pkl')
    if step == 1:
        process_timeseries(oroot, d, ts_lst_pkl)
        step += 1
        if single_step:
            return
    # Segmentation
    seg = os.path.join(oroot, 'segmentation/{}_obj_layer.tif'.format(d['chain_name']))
    if step == 2:
        perform_segmentation(seg, d)
        step += 1
        if single_step:
            return
    # Training/Validation Workflow
    m_file = {}
    for cf in d['ref_db']['fields']:
        m_file[cf] = os.path.join(oroot, 'model/model_{}.pkl'.format(cf))
    if step == 3:
        train_valid_workflow(seg, ts_lst_pkl, d, m_file)
        step += 1
        if single_step:
            return
    return
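`basic` drives the whole chain from one JSON config; `runlevel` picks the first step to run and `single_step` stops after it (e.g. `basic('config.json', runlevel=2, single_step=True)` runs only the segmentation). For reference, a hedged config sketch assembling every key the driver and its helpers read; all values are hypothetical placeholders, only the key names come from the code above:

    import json

    cfg = {
        'chain_name': 'demo_chain',
        'output_path': '/data/moringa_out',
        'roi': '/data/roi.shp',
        'timeseries': [{'type': 's2',             # only 's2' is supported so far
                        'provider': 'theia',      # assumed value, passed through to preprocess_s2
                        'path': '/data/S2_THEIA',
                        'output_dates_file': '/data/output_dates.txt'}],
        'segmentation': {'src': '/data/seg_source.tif', 'th': 300,
                         'cw': 0.5, 'sw': 0.5, 'n_first_iter': 5,
                         'margin': 100, 'n_proc': 8, 'lightmode': False},
        'ref_db': {'path': '/data/REF/ref_l2.shp',
                   'fields': ['Class_L1a', 'class']},
        'userfeat': ['/data/THR/THR_SPOT6.tif'],
        'training': {'classifier': 'rf', 'parameters': {'n_trees': 100}},
    }
    with open('config.json', 'w') as f:
        json.dump(cfg, f, indent=2)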
moringa.py
@@ -6,8 +6,8 @@ import VHR.vhrbase
 from TimeSeries import s2theia, s2planetary, s1base, s1planetary, planet_mosaics
 
 def run_segmentation(img, threshold, cw, sw , out_seg,
-                     n_first_iter, margin, roi, n_proc, memory,
-                     remove_graph, force_parallel, light):
+                     n_first_iter, margin, roi, n_proc, memory=None,
+                     remove_graph=True, force_parallel=False, light=False):
     if not os.path.exists(os.path.dirname(out_seg)):
         os.makedirs(os.path.dirname(out_seg))
     params = OBIA.segmentation.LSGRMParams(threshold, cw, sw, n_first_iter, margin)
@@ -15,10 +15,10 @@ def run_segmentation(img, threshold, cw, sw , out_seg,
         OBIA.segmentation.lsgrm_light(img, params, out_seg, n_proc, memory, roi, force_parallel)
     else:
         OBIA.segmentation.lsgrm(img, params, out_seg, n_proc, memory, roi, remove_graph, force_parallel)
-    return
+    return out_seg
 
 def preprocess_spot67(in_fld, out_fld, dem_fld, geoid_file, skip_ps, compress,
-                      clip, align_to, align_to_band, align_using_band):
+                      clip, align_to=None, align_to_band=3, align_using_band=1):
     sp = VHR.vhrbase.SPOT67RasterPipeline(in_fld)
     sp.to_toa()
     sp.orthorectify(dem_fld, geoid_file)
@@ -77,7 +77,7 @@ def preprocess_s1(in_fld, roi, out_fld, dem_fld=None, geoid=None, direction=None
     s1.compute_features()
     return s1.write_outputs(out_fld)
 
-def fetch(imagery, shp, dt, out_fld, auth):
+def fetch(imagery, shp, dt, out_fld, auth=None):
     assert(imagery in ['s2theia', 's2planetary', 's1grd', 's1rtc', 'planetmosaics'])
     if imagery != 's2planetary' and auth is None:
         raise ValueError("Please provide authentication information.")
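With defaults added for memory, remove_graph, force_parallel and light (which the new perform_segmentation caller relies on), and run_segmentation now returning its output path, a minimal call needs only the positional arguments. A sketch with hypothetical paths and parameter values:

    seg_path = run_segmentation('/data/feat_stack.tif',       # input image
                                300,                          # threshold
                                0.5, 0.5,                     # cw, sw (weights fed to LSGRMParams)
                                '/data/out/segmentation.tif', # out_seg
                                5,                            # n_first_iter
                                100,                          # margin
                                '/data/roi.shp',              # roi
                                8)                            # n_proc
    # seg_path == '/data/out/segmentation.tif' thanks to the new `return out_seg`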