Commit 471b67a6 authored by Cresson Remi

FIX, REFAC: use f-string and os.path.join() for paths concat

1 merge request: !6 Checkpoints callbacks fixes
Showing 19 additions and 19 deletions (+19 -19)
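The whole commit applies one pattern: path strings previously built with system.pathify() plus str.format() concatenation are now built with os.path.join() and f-strings. A minimal sketch of that before/after pattern, using hypothetical values (root_dir, tile and suffix are placeholders, POSIX separators assumed):

import os

root_dir = "/data/rois"          # hypothetical ROI root directory
tile, suffix = "T31TCJ", "train" # hypothetical tile name and suffix

# before: manual concatenation, relying on root_dir ending with a separator
old_style = "{}{}_{}.tif".format(root_dir + "/", tile, suffix)

# after: os.path.join() inserts the separator, the f-string builds the basename
new_style = os.path.join(root_dir, f"{tile}_{suffix}.tif")

assert old_style == new_style == "/data/rois/T31TCJ_train.tif"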
@@ -436,7 +436,6 @@ class RoisLoader(dict):
  assert root_dir_key in data
  self.rois_root_dir = data[root_dir_key]
  assert isinstance(self.rois_root_dir, str)
- self.rois_root_dir = system.pathify(self.rois_root_dir)
  def get_list(key):
  """
@@ -464,7 +463,7 @@ class RoisLoader(dict):
  """
  tiles = {}
  for tile in tiles_list:
- roi_file = "{}{}_{}.tif".format(self.rois_root_dir, tile, suffix)
+ roi_file = os.path.join(self.rois_root_dir, f"{tile}_{suffix}.tif")
  assert system.file_exists(roi_file)
  tiles.update({tile: roi_file})
  self.update({"roi_{}".format(suffix): tiles})
@@ -21,9 +21,9 @@ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  DEALINGS IN THE SOFTWARE.
  """
  """Classes for Tensorflow summaries"""
+ import os
  import tensorflow as tf
  from tensorflow import keras
- from decloud.core import system
  from decloud.preprocessing import constants
@@ -71,7 +71,7 @@ class PreviewsCallback(keras.callbacks.Callback):
  predicted = self.model.predict(self.test_data)
  # Log the images summary.
- file_writer = tf.summary.create_file_writer(system.pathify(self.logdir) + 'previews')
+ file_writer = tf.summary.create_file_writer(os.path.join(self.logdir, 'previews'))
  with file_writer.as_default():
  for key in self.target_keys:
  tf.summary.image("predicted: " + key, get_preview_fn(key)(predicted[key]), step=epoch)
...
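The previews writer now receives the joined sub-directory directly. A self-contained sketch of the same pattern, with a hypothetical log directory and a dummy image batch standing in for the model predictions:

import os
import numpy as np
import tensorflow as tf

logdir = "/tmp/logs"  # hypothetical log directory
file_writer = tf.summary.create_file_writer(os.path.join(logdir, "previews"))
with file_writer.as_default():
    # one 64x64 RGB image with values in [0, 1], written at step 0
    dummy = np.random.rand(1, 64, 64, 3).astype("float32")
    tf.summary.image("predicted: example", dummy, step=0)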
@@ -21,6 +21,7 @@ FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
  DEALINGS IN THE SOFTWARE.
  """
  """Classes for Sentinel images access"""
+ import os
  import datetime
  import itertools
  import json
@@ -70,9 +71,9 @@ def s1_filename_to_md(filename):
  metadata = dict()
  splits = basename.split("_")
  if len(splits) != 7:
- raise Exception("{} not a S1 image (wrong number of splits between \"_\" in filename)".format(filename))
+ raise Exception(f"{filename} not a S1 image (wrong number of splits between \"_\" in filename)")
  if len(splits[5]) < 15:
- raise Exception("{} not a S1 archive (wrong date format)".format(filename))
+ raise Exception(f"{filename} not a S1 archive (wrong date format)")
  date_str = splits[5][:15]
  metadata["tile"] = splits[1]
  if date_str[9:15] == "xxxxxx":
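The converted messages embed the filename directly; the escaped double quotes sit in the literal part of the f-string, so they carry over unchanged. A quick equivalence check with a hypothetical, deliberately malformed filename:

filename = "S1A_wrong_name.tif"  # hypothetical input

old_msg = "{} not a S1 image (wrong number of splits between \"_\" in filename)".format(filename)
new_msg = f"{filename} not a S1 image (wrong number of splits between \"_\" in filename)"
assert old_msg == new_msg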
@@ -110,7 +111,7 @@ def create_s1_image(vvvh_gtiff, ref_patchsize, patchsize_10m):
  pth = system.dirname(vvvh_gtiff)
  # Compute stats
- edge_stats_fn = system.pathify(pth) + system.new_bname(metadata["filename"], constants.SUFFIX_STATS_S1)
+ edge_stats_fn = os.path.join(pth, system.new_bname(metadata["filename"], constants.SUFFIX_STATS_S1))
  compute_patches_stats(image=metadata["filename"], output_stats=edge_stats_fn, expr="im1b1==0&&im1b2==0",
  patchsize=ref_patchsize)
@@ -145,7 +146,7 @@ def s2_filename_to_md(filename):
  metadata = dict()
  splits = basename.split("_")
  if len(splits) < 4:
- raise Exception("{} might not be a S2 product".format(filename))
+ raise Exception(f"{filename} might not be a S2 product")
  metadata["tile"] = splits[3]
  datestr = splits[1]
  metadata["date"] = datetime.datetime.strptime(datestr[:-1], '%Y%m%d-%H%M%S-%f')
@@ -180,7 +181,7 @@ def compute_patches_stats(image, output_stats, patchsize, expr=""):
  raise Exception("\"image\" must be of type str, if no expr is provided!")
  app.SetParameterString("in", image)
  app.SetParameterInt("patchsize", patchsize)
- app.SetParameterString("out", "{}?&gdal:co:COMPRESS=DEFLATE".format(output_stats))
+ app.SetParameterString("out", f"{output_stats}?&gdal:co:COMPRESS=DEFLATE")
  app.SetParameterOutputImagePixelType("out", otbApplication.ImagePixelType_uint16)
  app.ExecuteAndWriteOutput()
  system.declare_complete(output_stats)
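The OTB extended-filename suffix is appended to the output path inside the f-string; a one-line check that the resulting string is unchanged (output path hypothetical):

output_stats = "/tmp/stats.tif"  # hypothetical output path
assert f"{output_stats}?&gdal:co:COMPRESS=DEFLATE" == "/tmp/stats.tif?&gdal:co:COMPRESS=DEFLATE"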
@@ -216,7 +217,7 @@ def create_s2_image_from_dir(s2_product_dir, ref_patchsize, patchsize_10m, with_
  # Check that files exists
  def _check(title, filename):
  if filename is None:
- raise Exception("File for {} does not exist in product {}".format(title, s2_product_dir))
+ raise Exception(f"File for {title} does not exist in product {s2_product_dir}")
  _check("edge mask", edg_mask)
  _check("cloud mask", cld_mask)
@@ -231,13 +232,13 @@ def create_s2_image_from_dir(s2_product_dir, ref_patchsize, patchsize_10m, with_
  logging.debug("\t20m bands: %s", b20m_imgs)
  # Compute stats
- clouds_stats_fn = system.pathify(s2_product_dir) + system.new_bname(cld_mask, constants.SUFFIX_STATS_S2)
+ clouds_stats_fn = os.path.join(s2_product_dir, system.new_bname(cld_mask, constants.SUFFIX_STATS_S2))
- edge_stats_fn = system.pathify(s2_product_dir) + system.new_bname(edg_mask, constants.SUFFIX_STATS_S2)
+ edge_stats_fn = os.path.join(s2_product_dir, system.new_bname(edg_mask, constants.SUFFIX_STATS_S2))
  compute_patches_stats(image=cld_mask, output_stats=clouds_stats_fn, expr="im1b1>0", patchsize=ref_patchsize)
  compute_patches_stats(image=edg_mask, output_stats=edge_stats_fn, patchsize=ref_patchsize)
  # Return a s2 image class
- metadata = s2_filename_to_md(system.pathify(s2_product_dir))
+ metadata = s2_filename_to_md(s2_product_dir)
  return S2Image(acq_date=metadata["date"],
  edge_stats_fn=edge_stats_fn,
  bands_10m_fn=b10m_imgs,
@@ -333,7 +334,7 @@ class AbstractImage(ABC):
  :return: a numpy array
  """
  if key not in self.patch_sources:
- raise Exception("Key {} not in patches sources. Available sources keys: {}".format(key, self.patch_sources))
+ raise Exception(f"Key {key} not in patches sources. Available sources keys: {self.patch_sources}")
  return self.patch_sources[key].get(patch_location=patch_location)
  @abstractmethod
@@ -917,7 +918,7 @@ class TileHandler:
  new_sample['geoinfo'] = \
  src.patch_sources[KEY_S2_BANDS_10M].get_geographic_info(patch_location=tuple_pos)
  else:
- raise Exception("Unknown key {}!".format(sx_key))
+ raise Exception(f"Unknown key {sx_key}!")
  src_dict = src.get(patch_location=tuple_pos)
  for src_key, src_np_arr in src_dict.items():
  # the final key is composed in concatenating key, "_", src_key
@@ -971,7 +972,7 @@ class TilesLoader(dict):
  if key in data:
  value = data[key]
  assert isinstance(value, str)
- return system.pathify(value)
+ return value
  return None
  # Paths
@@ -980,12 +981,12 @@ class TilesLoader(dict):
  self.dem_tiles_root_dir = get_pth("DEM_ROOT_DIR")
  if self.s2_tiles_root_dir is None:
- raise Exception("S2_ROOT_DIR key not found in {}".format(the_json))
+ raise Exception(f"S2_ROOT_DIR key not found in {the_json}")
  # Tiles list
  self.tiles_list = data["TILES"]
  if self.tiles_list is None:
- raise Exception("TILES key not found in {}".format(the_json))
+ raise Exception(f"TILES key not found in {the_json}")
  if not isinstance(self.tiles_list, list):
  raise Exception("TILES value must be a list of strings!")
@@ -995,7 +996,7 @@ class TilesLoader(dict):
  def _get_tile_pth(root_dir, current_tile=tile):
  """ Returns the directory for the current tile """
  if root_dir is not None:
- return root_dir + current_tile
+ return os.path.join(root_dir, current_tile)
  return None
  s1_dir = _get_tile_pth(self.s1_tiles_root_dir)
...
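_get_tile_pth() follows the same rule as the other path fixes: since get_pth() no longer appends a trailing separator, os.path.join() now supplies it when the root directory and the tile name are combined. A standalone sketch of that helper's logic (directories and tile name are hypothetical, POSIX separators assumed):

import os

def get_tile_path(root_dir, tile):
    """Return the directory for one tile, or None when the root dir is not configured."""
    if root_dir is not None:
        return os.path.join(root_dir, tile)
    return None

assert get_tile_path("/data/s1_tiles", "T31TCJ") == "/data/s1_tiles/T31TCJ"
assert get_tile_path(None, "T31TCJ") is None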