Source

Target

Commits (3)
Showing with 35 additions and 17 deletions
+35 -17
...@@ -93,7 +93,7 @@ class ObjectBasedClassifier: ...@@ -93,7 +93,7 @@ class ObjectBasedClassifier:
assert('folds' in self.training_base.keys()) assert('folds' in self.training_base.keys())
models = [] models = []
results = [] results = []
yt_yp = [] truelabs = np.array([])
for tr_i, ts_i in tqdm(self.training_base['folds'], desc='Training'): for tr_i, ts_i in tqdm(self.training_base['folds'], desc='Training'):
models.append(RandomForestClassifier(n_estimators=n_estimators)) models.append(RandomForestClassifier(n_estimators=n_estimators))
models[-1].fit(self.training_base['X'][tr_i], self.training_base[class_field][tr_i]) models[-1].fit(self.training_base['X'][tr_i], self.training_base[class_field][tr_i])
...@@ -102,6 +102,7 @@ class ObjectBasedClassifier: ...@@ -102,6 +102,7 @@ class ObjectBasedClassifier:
c = np.delete(c, np.isin(l, self.training_base['dummy_ids'])) c = np.delete(c, np.isin(l, self.training_base['dummy_ids']))
l = np.delete(l, np.isin(l, self.training_base['dummy_ids'])) l = np.delete(l, np.isin(l, self.training_base['dummy_ids']))
y_true, y_pred = self.obia_base.true_pred_bypixel(l, c, class_field) y_true, y_pred = self.obia_base.true_pred_bypixel(l, c, class_field)
truelabs = np.unique(np.concatenate((truelabs,y_true,y_pred)))
results.append( results.append(
{ {
'conf_matrix': confusion_matrix(y_true, y_pred, labels=np.unique(self.training_base[class_field])), 'conf_matrix': confusion_matrix(y_true, y_pred, labels=np.unique(self.training_base[class_field])),
...@@ -127,7 +128,8 @@ class ObjectBasedClassifier: ...@@ -127,7 +128,8 @@ class ObjectBasedClassifier:
'f1_mean': np.mean([x['p_r_f1'][2] for x in results], axis=0), 'f1_mean': np.mean([x['p_r_f1'][2] for x in results], axis=0),
'f1_std': np.std([x['p_r_f1'][2] for x in results], axis=0), 'f1_std': np.std([x['p_r_f1'][2] for x in results], axis=0),
'importance_mean': {k:v for k, v in zip(self.obia_base.get_vars(), np.mean(all_imp, axis=0))}, 'importance_mean': {k:v for k, v in zip(self.obia_base.get_vars(), np.mean(all_imp, axis=0))},
'importance_std': {k:v for k, v in zip(self.obia_base.get_vars(), np.std(all_imp, axis=0))} 'importance_std': {k:v for k, v in zip(self.obia_base.get_vars(), np.std(all_imp, axis=0))},
'actual_labels': list(truelabs)
} }
return models, summary, results return models, summary, results
......
...@@ -92,9 +92,12 @@ class OBIABase: ...@@ -92,9 +92,12 @@ class OBIABase:
ref_ol.Execute() ref_ol.Execute()
self.ref_obj_layer_pipe = [in_seg, ras_id, ref_ol] self.ref_obj_layer_pipe = [in_seg, ras_id, ref_ol]
self.ref_db = None
'''
self.ref_db = pd.DataFrame(data=[], self.ref_db = pd.DataFrame(data=[],
columns=['area', 'orig_label', 'polygon_id'] + class_field, columns=['area', 'orig_label', 'polygon_id'] + class_field,
index=[]) index=[])
'''
r = otb.itkRegion() r = otb.itkRegion()
for tn, t in tqdm(self.tiles.items(), desc='Init. Ref. DB', total=len(self.tiles)): for tn, t in tqdm(self.tiles.items(), desc='Init. Ref. DB', total=len(self.tiles)):
r['index'][0], r['index'][1] = t[0], t[1] r['index'][0], r['index'][1] = t[0], t[1]
...@@ -104,14 +107,19 @@ class OBIABase: ...@@ -104,14 +107,19 @@ class OBIABase:
tile_ref_ol = ref_ol.GetImageAsNumpyArray('out').astype(np.int32) tile_ref_ol = ref_ol.GetImageAsNumpyArray('out').astype(np.int32)
tile_int_img = intensity_img.GetVectorImageAsNumpyArray('out').astype(int) tile_int_img = intensity_img.GetVectorImageAsNumpyArray('out').astype(int)
rp = regionprops(tile_ref_ol, intensity_image=tile_int_img) rp = regionprops(tile_ref_ol, intensity_image=tile_int_img)
self.ref_db = pd.concat([ if self.ref_db is None:
self.ref_db, self.ref_db = pd.DataFrame(data=[np.insert(o.intensity_min, 0, o.area) for o in rp if self.obj_to_tile[o.label] == tn],
pd.DataFrame( columns=['area', 'orig_label', 'polygon_id'] + class_field,
data=[np.insert(o.intensity_min, 0, o.area) for o in rp if self.obj_to_tile[o.label] == tn], index=[o.label for o in rp if self.obj_to_tile[o.label] == tn])
columns=self.ref_db.columns, else:
index=[o.label for o in rp if self.obj_to_tile[o.label] == tn] self.ref_db = pd.concat([
)] self.ref_db,
) pd.DataFrame(
data=[np.insert(o.intensity_min, 0, o.area) for o in rp if self.obj_to_tile[o.label] == tn],
columns=self.ref_db.columns,
index=[o.label for o in rp if self.obj_to_tile[o.label] == tn]
)]
)
return return
......
import os import os
def parse_colormap_file(fn): def parse_colormap_file(fn, filter=None):
labels = [] labels = []
colors = [] colors = []
class_names = [] class_names = []
...@@ -8,6 +8,8 @@ def parse_colormap_file(fn): ...@@ -8,6 +8,8 @@ def parse_colormap_file(fn):
with open(fn, 'r') as f: with open(fn, 'r') as f:
for l in f.read().splitlines(): for l in f.read().splitlines():
sl = l.split(' ') sl = l.split(' ')
if filter is not None and int(sl[0]) not in filter:
continue
labels.append(int(sl[0])) labels.append(int(sl[0]))
colors.append((int(sl[1]),int(sl[2]),int(sl[3]),int(sl[4]))) colors.append((int(sl[1]),int(sl[2]),int(sl[3]),int(sl[4])))
class_names.append(' '.join(sl[5:])) class_names.append(' '.join(sl[5:]))
......
...@@ -30,12 +30,12 @@ def filter_and_order_importance(summary, importance_perc, max_num_var=35): ...@@ -30,12 +30,12 @@ def filter_and_order_importance(summary, importance_perc, max_num_var=35):
def generate_report_figures(map, palette_fn, results, summary, out_dir, map_name=None, def generate_report_figures(map, palette_fn, results, summary, out_dir, map_name=None,
importance_perc=0.75, max_variables=35): importance_perc=0.75, max_variables=35):
labels, class_names, colors = parse_colormap_file(palette_fn) labels, class_names, colors = parse_colormap_file(palette_fn, filter=summary['actual_labels'])
colors_norm = [(x[0]/255,x[1]/255,x[2]/255,x[3]/255) for x in colors] colors_norm = [(x[0]/255,x[1]/255,x[2]/255,x[3]/255) for x in colors]
with plt.ioff(): with plt.ioff():
#font = {'weight': 'normal', font = {'weight': 'normal',
# 'size': 8} 'size': 8}
#plt.rc('font', **font) plt.rc('font', **font)
if not os.path.exists(out_dir): if not os.path.exists(out_dir):
os.makedirs(out_dir) os.makedirs(out_dir)
if not isinstance(results, list): if not isinstance(results, list):
...@@ -73,7 +73,7 @@ def generate_report_figures(map, palette_fn, results, summary, out_dir, map_name ...@@ -73,7 +73,7 @@ def generate_report_figures(map, palette_fn, results, summary, out_dir, map_name
plt.tight_layout() plt.tight_layout()
plt.savefig(of['summary'], dpi=300) plt.savefig(of['summary'], dpi=300)
imp_m, imp_s, imp_n = filter_and_order_importance(summary, importance_perc) imp_m, imp_s, imp_n = filter_and_order_importance(summary, importance_perc, max_num_var=max_variables)
fig, ax = plt.subplots() fig, ax = plt.subplots()
ax.barh(range(len(imp_n)), imp_m, xerr=imp_s, align='center') ax.barh(range(len(imp_n)), imp_m, xerr=imp_s, align='center')
...@@ -194,7 +194,7 @@ def generate_pdf(of, out_pdf, name='output'): ...@@ -194,7 +194,7 @@ def generate_pdf(of, out_pdf, name='output'):
def generate_text_report(results, summary, palette_fn, output_fn, name='output'): def generate_text_report(results, summary, palette_fn, output_fn, name='output'):
if os.path.dirname(output_fn) != '': if os.path.dirname(output_fn) != '':
os.makedirs(os.path.dirname(output_fn), exist_ok=True) os.makedirs(os.path.dirname(output_fn), exist_ok=True)
labels, class_names, _ = parse_colormap_file(palette_fn) labels, class_names, _ = parse_colormap_file(palette_fn, filter=summary['actual_labels'])
lines = ['MORINGA Final Report for chain {}, {}'.format(name, datetime.now().strftime('%Y-%m-%d %Hh%M')), ''] lines = ['MORINGA Final Report for chain {}, {}'.format(name, datetime.now().strftime('%Y-%m-%d %Hh%M')), '']
table_lines = [] table_lines = []
......
# Environment setup for the MORINGA processing chain.
# Creates a dedicated conda env, then uses mamba (a faster conda-compatible
# solver) to install the heavy geospatial/scientific stack from conda-forge.
# NOTE(review): gdal is pinned to 3.4.2, presumably for OTB/rasterio binary
# compatibility — confirm before bumping.
conda create --name moringa-env -y python=3.10.13
conda activate moringa-env
# mamba is installed first because solving the gdal/rasterio/geopandas set
# with plain conda is very slow.
conda install -y -c conda-forge mamba
mamba install -y -c conda-forge gdal=3.4.2 pyproj rasterio geopandas shapely scikit-learn scikit-image
# Pure-Python / pip-only dependencies (data access clients, PDF report output).
pip install psutil planetary_computer fpdf2 planet eodag tabulate