"""Basic MORINGA processing chain: time series preprocessing, segmentation, training."""
import os
import json
import pickle

from moringa import preprocess_s2, run_segmentation
from Learning.ObjectBased import ObjectBasedClassifier


def process_timeseries(oroot, d, ts_lst_pkl):
    """Preprocess each time series declared in the config and pickle the resulting list."""
    ts_lst = []
    for ts in d['timeseries']:
        print('[MORINGA-INFO] : Preprocessing {} from {}'.format(ts['type'], ts['provider']))
        if ts['type'] == 's2':
            # One output folder per (type, provider) pair, e.g. <oroot>/timeseries/s2<provider>
            ots = os.path.join(oroot, 'timeseries', ts['type'] + ts['provider'])
            os.makedirs(ots, exist_ok=True)
            ts_lst.append(preprocess_s2(ts['path'],
                                        ots,
                                        roi=d['roi'],
                                        output_dates_file=ts['output_dates_file'],
                                        provider=ts['provider']))
        else:
            raise ValueError('TimeSeries type not yet supported.')
    # Persist the preprocessed series so later steps can reuse them without recomputing.
    with open(ts_lst_pkl, 'wb') as ts_save:
        pickle.dump(ts_lst, ts_save)
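
# For reference, a 'timeseries' entry consumed by the loop above could look like
# the following (values are illustrative assumptions, not from a real run):
#
#   {"type": "s2", "provider": "theia",
#    "path": "/data/s2/archive", "output_dates_file": "dates.txt"}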


def perform_segmentation(ofn, d):
    """Run the segmentation step and write the object layer to ofn."""
    print('[MORINGA-INFO] : Performing segmentation')
    os.makedirs(os.path.dirname(ofn), exist_ok=True)
    run_segmentation(d['segmentation']['src'],
                     d['segmentation']['th'],
                     d['segmentation']['cw'],
                     d['segmentation']['sw'],
                     ofn,
                     n_first_iter=d['segmentation']['n_first_iter'],
                     margin=d['segmentation']['margin'],
                     roi=d['roi'],
                     n_proc=d['segmentation']['n_proc'],
                     light=d['segmentation']['lightmode'])
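
# The 'segmentation' config block is forwarded to run_segmentation almost
# verbatim. A plausible entry, with illustrative values only:
#
#   {"src": "/data/vhr.tif", "th": 0.5, "cw": 0.5, "sw": 0.5,
#    "n_first_iter": 5, "margin": 100, "n_proc": 8, "lightmode": true}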


def train_valid_workflow(seg, ts_lst_pkl, d, m_file):
    """Train one classifier per reference class field and pickle each fitted model."""
    assert os.path.exists(seg)
    assert os.path.exists(ts_lst_pkl)
    print('[MORINGA-INFO] : Running Training/Validation Workflow')
    with open(ts_lst_pkl, 'rb') as ts_save:
        ts_lst = pickle.load(ts_save)
    obc = ObjectBasedClassifier(seg,
                                d['ref_db']['path'],
                                ts_lst,
                                d['userfeat'],
                                ref_class_field=d['ref_db']['fields'])
    # Folds are stratified on the last field listed in the reference database.
    obc.gen_k_folds(5, class_field=d['ref_db']['fields'][-1])
    for cf in d['ref_db']['fields']:
        if d['training']['classifier'] == 'rf':
            m, s, r = obc.train_RF(d['training']['parameters']['n_trees'],
                                   class_field=cf,
                                   return_true_vs_pred=True)
            m_dict = {'model': m, 'results': r, 'summary': s}
            # Create the parent folder of the model file (not a folder at the file path).
            os.makedirs(os.path.dirname(m_file[cf]), exist_ok=True)
            with open(m_file[cf], 'wb') as mf:
                pickle.dump(m_dict, mf)
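
# A saved model can be reloaded with the mirror of the dump above, e.g.
# (illustrative path, following the layout produced by basic() below):
#
#   with open('<output_path>/<chain_name>/model/model_<field>.pkl', 'rb') as mf:
#       m_dict = pickle.load(mf)
#   model, summary, results = m_dict['model'], m_dict['summary'], m_dict['results']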


def basic(cfg, runlevel=1, single_step=False):
    """Run the basic chain described by the JSON config cfg, starting at step runlevel."""
    os.environ['OTB_LOGGER_LEVEL'] = 'CRITICAL'
    with open(cfg, 'r') as f:
        d = json.load(f)
    oroot = os.path.join(d['output_path'], d['chain_name'])
    step = runlevel

    # Step 1: preprocess time series
    ts_lst_pkl = os.path.join(oroot, 'time_series_list.pkl')
    if step == 1:
        process_timeseries(oroot, d, ts_lst_pkl)
        step += 1
        if single_step:
            return

    # Step 2: segmentation
    seg = os.path.join(oroot, 'segmentation/{}_obj_layer.tif'.format(d['chain_name']))
    if step == 2:
        perform_segmentation(seg, d)
        step += 1
        if single_step:
            return

    # Step 3: training/validation workflow
    m_file = {}
    for cf in d['ref_db']['fields']:
        m_file[cf] = os.path.join(oroot, 'model/model_{}.pkl'.format(cf))
    if step == 3:
        train_valid_workflow(seg, ts_lst_pkl, d, m_file)
        step += 1
        if single_step:
            return
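
# Minimal entry-point sketch. Top-level keys read by the chain: 'chain_name',
# 'output_path', 'roi', 'userfeat', 'timeseries', 'segmentation', 'ref_db'
# ('path' + 'fields') and 'training' ('classifier' + 'parameters'); see the
# per-step sketches above for the nested blocks. The config filename here is
# an illustrative assumption.
if __name__ == '__main__':
    basic('config.json')                                  # run the full chain
    # basic('config.json', runlevel=2, single_step=True)  # segmentation step only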