diff --git a/L1C_band_composition.py b/L1C_band_composition.py index a8df0f3..710d19d 100644 --- a/L1C_band_composition.py +++ b/L1C_band_composition.py @@ -27,6 +27,8 @@ import os.path as op import otbApplication import find_directory_names +from osgeo import gdal +from osgeo.gdalconst import GA_ReadOnly import glob import json import shutil @@ -115,6 +117,26 @@ def create_specific_indices(in_bands_dir, out_tif, indice_name, resolution=60): print('Please enter a valid indice name') +def create_rgb(bands_dir): + ''' + Create a true color vrt. + ''' + band1 = glob.glob(op.join(bands_dir, '*04.tif'))[0] + band2 = glob.glob(op.join(bands_dir, '*03.tif'))[0] + band3 = glob.glob(op.join(bands_dir, '*02.tif'))[0] + files_list = [band1, band2, band3] + + list_in_vrt = '' + for file_to_add in files_list: + list_in_vrt = list_in_vrt + ' ' + file_to_add + + out_file = op.join(bands_dir, 'RGB.vrt') + + # Create the VRT + build_vrt = 'gdalbuildvrt {} -separate {}'.format(out_file, list_in_vrt) + os.system(build_vrt) + + def create_time_difference_band(global_parameters, band_num, out_tif, resolution=60): ''' Create a TIF being the difference between the cloudy date and the clear date @@ -260,7 +282,7 @@ def dtm_addition(location, out_band, resolution=60): Create the adapted Digital Terrain Model From the original one, change its resolution ''' - paths_configuration = json.load(open(op.join('..', 'paths_configuration.json'))) + paths_configuration = json.load(open(op.join('parameters_files', 'paths_configuration.json'))) tile = paths_configuration["tile_location"][location] original_DTM_dir = paths_configuration["global_chains_paths"]["DTM_input"] @@ -283,6 +305,41 @@ def dtm_addition(location, out_band, resolution=60): shutil.copy(resized_DTM_path, out_band) +def slope_addition(global_parameters, resolution=10): + ''' + Resample the slope if not in the required resolution. 
+ ''' + + # Check resolution + in_slope = global_parameters["user_choices"]["slope"] + + dataset = gdal.Open(str(in_slope), GA_ReadOnly) + if dataset is None: + raise Exception("Error reading gis file {}".format(in_slope)) + + # Geographic information + geotransform = dataset.GetGeoTransform() + if geotransform is not None: + pixel_size = (geotransform[1], geotransform[5]) + print " Pixel size :", pixel_size + print " Resolution :", resolution + + # The slope is resized if the slope resoltion is + # not the expected one. + if int(pixel_size[0]) != int(resolution): + + print " Slope to resize from :", pixel_size[0], "to", resolution + out_dir = op.join(global_parameters["user_choices"]["main_dir"], 'Intermediate') + out_slope = op.join(out_dir, op.basename(in_slope).split('.')[0]) + '_resample.' + \ + op.basename(in_slope).split('.')[1] + + resize_band(in_slope, out_slope, resolution, resolution) + else : + out_slope = in_slope + + return out_slope + + def resize_band(in_band, out_band, pixelresX, pixelresY): ''' Resize a band with the given resolution (in meters) @@ -293,7 +350,7 @@ def resize_band(in_band, out_band, pixelresX, pixelresY): os.system(build_warp) -def create_image_compositions(global_parameters, location, current_date, heavy=False, force=False): +def create_image_compositions(global_parameters, location, current_date, unique_image, heavy=False, force=False): # potential_final_tif = op.join(global_parameters["user_choices"]["main_dir"], 'In_data', 'Image', global_parameters["user_choices"]["raw_img"]) @@ -308,7 +365,7 @@ def create_image_compositions(global_parameters, location, current_date, heavy=F # -------------------------------------------- # ------ Low resolution TIF with all the bands # Preparation - resolution = 60 + resolution = global_parameters["features"]["resolution"] out_dir_bands = op.join(global_parameters["user_choices"]["main_dir"], 'Intermediate') additional_bands = [] @@ -319,9 +376,11 @@ def 
create_image_compositions(global_parameters, location, current_date, heavy=F out_tif = op.join(out_dir_bands, (indice + '.tif')) create_specific_indices(bands_dir, out_tif, indice_name=indice, resolution=resolution) additional_bands.append(str(out_tif)) + # Create VRT + create_vrt(out_tif) # Create the ratios - ratios = create_ratio_bands(global_parameters, bands_dir, out_dir_bands, resolution=60) + ratios = create_ratio_bands(global_parameters, bands_dir, out_dir_bands, resolution=resolution) additional_bands.extend(ratios) use_DTM = str2bool(global_parameters["features"]["DTM"]) @@ -336,6 +395,16 @@ def create_image_compositions(global_parameters, location, current_date, heavy=F except: print('ERROR : THE DTM DOES NOT EXIST !!!') + use_slope = str2bool(global_parameters["features"]["slope"]) + + if use_slope: + # Append the slope + try: + slope_file = slope_addition(global_parameters, resolution=resolution) + additional_bands.append(str(slope_file)) + except: + print('ERROR : THE SLOPE FILE DOES NOT EXIST !!!') + create_textures = str2bool(global_parameters["features"]["textures"]) # Create the texture features if create_textures: @@ -352,12 +421,13 @@ def create_image_compositions(global_parameters, location, current_date, heavy=F additional_bands.append(str(out_tif)) # Create time difference features - bands_num = [int(band) for band in global_parameters["features"]["time_difference_bands"]] - out_dir_bands = op.join(global_parameters["user_choices"]["main_dir"], 'Intermediate') - for band_num in bands_num: - out_tif = op.join(out_dir_bands, ('time_' + str(band_num) + '.tif')) - create_time_difference_band(global_parameters, band_num, out_tif, resolution=resolution) - additional_bands.append(str(out_tif)) + if not str2bool(unique_image): + bands_num = [int(band) for band in global_parameters["features"]["time_difference_bands"]] + out_dir_bands = op.join(global_parameters["user_choices"]["main_dir"], 'Intermediate') + for band_num in bands_num: + out_tif = 
op.join(out_dir_bands, ('time_' + str(band_num) + '.tif')) + create_time_difference_band(global_parameters, band_num, out_tif, resolution=resolution) + additional_bands.append(str(out_tif)) # --- Create the main TIF with low resolution # create intermediate resolution files @@ -386,6 +456,9 @@ def create_image_compositions(global_parameters, location, current_date, heavy=F # create the concatenated TIF compose_bands_heavy(intermediate_sizes_paths, str(out_all_bands_tif)) + # Create a true color vrt + create_rgb(intermediate_bands_dir) + # -------------------------------------------- # ---- High resolution TIF with some bands only # same working principle but with different resolution @@ -436,27 +509,38 @@ def create_no_data_tif(global_parameters, out_tif, dilation_radius=10): ''' location = global_parameters["user_choices"]["location"] current_date = global_parameters["user_choices"]["current_date"] - clear_date = global_parameters["user_choices"]["clear_date"] - - current_dir, current_band_prefix, current_date = find_directory_names.get_L1C_dir( - location, current_date, display=False) - clear_dir, clear_band_prefix, clear_date = find_directory_names.get_L1C_dir( - location, clear_date, display=False) + unique_image = global_parameters["user_choices"]["unique_image"] # Band number, the 1 is 20m resolution, change it if # other resolution is wanted band_num_str = '{:02d}'.format(1) + current_dir, current_band_prefix, current_date = find_directory_names.get_L1C_dir( + location, current_date, display=False) cloudy_band = glob.glob(op.join(current_dir, (current_band_prefix + band_num_str + '.jp2')))[0] - clear_band = glob.glob(op.join(clear_dir, (clear_band_prefix + band_num_str + '.jp2')))[0] - - # Selection of the no_data pixels - BandMathX = otbApplication.Registry.CreateApplication("BandMathX") - BandMathX.SetParameterStringList("il", [str(cloudy_band), str(clear_band)]) - expression = "(im1b1 <= 0 or im2b1 <= 0) ? 
1 : 0" - BandMathX.SetParameterString("exp", expression) - BandMathX.UpdateParameters() - BandMathX.Execute() + + if not str2bool(unique_image): + clear_date = global_parameters["user_choices"]["clear_date"] + clear_dir, clear_band_prefix, clear_date = find_directory_names.get_L1C_dir( + location, clear_date, display=False) + clear_band = glob.glob(op.join(clear_dir, (clear_band_prefix + band_num_str + '.jp2')))[0] + + # Selection of the no_data pixels + BandMathX = otbApplication.Registry.CreateApplication("BandMathX") + BandMathX.SetParameterStringList("il", [str(cloudy_band), str(clear_band)]) + expression = "(im1b1 <= 0 or im2b1 <= 0) ? 1 : 0" + BandMathX.SetParameterString("exp", expression) + BandMathX.UpdateParameters() + BandMathX.Execute() + else: + # Selection of the no_data pixels + BandMathX = otbApplication.Registry.CreateApplication("BandMathX") + BandMathX.SetParameterStringList("il", [str(cloudy_band)]) + expression = "(im1b1 <= 0) ? 1 : 0" + BandMathX.SetParameterString("exp", expression) + BandMathX.UpdateParameters() + BandMathX.Execute() + # Dilatation of the zones, to have some margin. radius in pixels Dilatation = otbApplication.Registry.CreateApplication("BinaryMorphologicalOperation") Dilatation.SetParameterInputImage("in", BandMathX.GetParameterOutputImage("out")) @@ -468,6 +552,41 @@ def create_no_data_tif(global_parameters, out_tif, dilation_radius=10): Dilatation.ExecuteAndWriteOutput() +def create_vrt_files(global_parameters): + ''' + Creation of VRT files NDVI, NDWI. 
+ ''' + + in_file = global_parameters["user_choices"]["raw_img"] + + out_dir_bands = op.join(global_parameters["user_choices"]["main_dir"], 'Intermediate') + temp_split_bands = op.join(out_dir_bands, global_parameters["vrt"]["temporary_file"]) + + # Split input multibands image in one tif per band + SplitImage = otbApplication.Registry.CreateApplication("SplitImage") + SplitImage.SetParameterString("in", str(in_file)) + SplitImage.SetParameterString("out", str(temp_split_bands)) + SplitImage.UpdateParameters() + SplitImage.ExecuteAndWriteOutput() + + list_vrt_type = ["ndvi", "ndwi"] + + # Create the vrt files + for vrt_type in list_vrt_type: + if (global_parameters["vrt"][vrt_type]): + bands_num_rgb = [int(band) for band in global_parameters["vrt"][vrt_type]] + create_vrt(bands_num_rgb,temp_split_bands, vrt_type) + + +def create_vrt(in_file): + """ + Creation of a VRT file. + """ + vrt_file = in_file.split(".")[0] + ".vrt" + build_vrt = 'gdalbuildvrt {} {}'.format(vrt_file, in_file) + os.system(build_vrt) + + def str2bool(v): ''' Converts a string to a boolean @@ -484,12 +603,13 @@ def main(): global_parameters = json.load(open(op.join('parameters_files', 'global_parameters.json'))) out_tif = 'tmp/tmp_tif.tif' + create_no_data_tif(global_parameters, out_tif, resolution=60) current_date = '20170520' location = 'Pretoria' - create_image_compositions(global_parameters, location, current_date, heavy=False) + create_image_compositions(global_parameters, location, current_date, unique_image, heavy=False) return diff --git a/OTB_workflow.py b/OTB_workflow.py index ebf7a68..c60299b 100644 --- a/OTB_workflow.py +++ b/OTB_workflow.py @@ -1,4 +1,5 @@ #!/usr/bin/python +# -*- coding: utf-8 -*- """ Tool to generate reference cloud masks for validation of operational cloud masks. The elaboration is performed using an active learning procedure. 
@@ -309,8 +310,13 @@ def image_classification(global_parameters, shell=True, proceed=True, additional if proceed == True: if shell == True: print(" Image Classification (shell)") - command = 'otbcli_ImageClassifier -in {} -model {} -out {} -confmap {} -mask {}'.format( - raw_img, model, img_labeled, confidence_map, mask_tif) + if (global_parameters["postprocessing"]["confidence_map"] == "True"): + command = 'otbcli_ImageClassifier -in {} -model {} -out {} -confmap {} -mask {}'.format( + raw_img, model, img_labeled, confidence_map, mask_tif) + else: + command = 'otbcli_ImageClassifier -in {} -model {} -out {} -mask {}'.format( + raw_img, model, img_labeled, mask_tif) + subprocess.call(command, shell=True) else: @@ -318,31 +324,30 @@ def image_classification(global_parameters, shell=True, proceed=True, additional ImageClassifier = otbApplication.Registry.CreateApplication("ImageClassifier") ImageClassifier.SetParameterString("in", str(raw_img)) ImageClassifier.SetParameterString("model", str(model)) - ImageClassifier.SetParameterString("out", str(img_labeled)) - ImageClassifier.SetParameterString("confmap", str(confidence_map)) + if (global_parameters["postprocessing"]["confidence_map"] == "True"): + ImageClassifier.SetParameterString("confmap", str(confidence_map)) ImageClassifier.SetParameterString("mask", str(mask_tif)) ImageClassifier.UpdateParameters() - ImageClassifier.ExecuteAndWriteOutput() + ImageClassifier.Execute() print('Done') else: print("Classification not done this time") - return img_labeled + return ImageClassifier -def confidence_map_viz(global_parameters, additional_name=''): +def confidence_map_viz(global_parameters, app_confidence_map, additional_name=''): ''' 6.5 Enhancement of the classification map ''' main_dir = global_parameters["user_choices"]["main_dir"] - confidence_map_in = op.join(main_dir, 'Out', "confidence{}.tif".format(additional_name)) confidence_map_out = op.join( main_dir, 'Out', 
"confidence{}_enhanced.tif".format(additional_name)) confidence_map_exploitation.confidence_map_change( - confidence_map_in, confidence_map_out, median_radius=11) + app_confidence_map, confidence_map_out, median_radius=11) return # -------- 3. POST-PROCESSING --------------------- @@ -414,14 +419,13 @@ def compute_mat_conf(global_parameters, show=True, proceed=True): return conf_matrix -def classification_regularization(global_parameters, proceed=True, radius=2): +def classification_regularization(global_parameters, img_labeled, proceed=True, radius=2): ''' 9. Regularization of the classification map ''' main_dir = global_parameters["user_choices"]["main_dir"] - img_labeled = op.join(main_dir, 'Out', global_parameters["general"]["img_labeled"]) img_regularized = op.join( main_dir, 'Out', global_parameters["general"]["img_labeled_regularized"]) @@ -429,7 +433,7 @@ def classification_regularization(global_parameters, proceed=True, radius=2): "ClassificationMapRegularization") # The following lines set all the application parameters: - ClassificationMapRegularization.SetParameterString("io.in", str(img_labeled)) + ClassificationMapRegularization.SetParameterInputImage("io.in", img_labeled) ClassificationMapRegularization.SetParameterString("io.out", str(img_regularized)) ClassificationMapRegularization.SetParameterInt("ip.radius", radius) ClassificationMapRegularization.EnableParameter("ip.suvbool") diff --git a/README.md b/README.md index 30dea30..747d0f9 100644 --- a/README.md +++ b/README.md @@ -2,6 +2,7 @@ The Active Learning for Cloud Detection (ALCD) software enables to generate reference cloud masks which may be used to validate operational cloud masks, such as those generated by MAJA. The reference cloud masks are generated interactively using an iterative active learning procedure which enables to generate an accurate reference cloud mask in less than two hours. 
+The chain may be used also for other land cover classifications, as well as single images classifications. Improvements on finer resolution and processing optimisations have been implemented by the assessment team for a water surface chain. The tool was written by Louis Baetens during a training period at CESBIO, funded by CNES, under supervision of Olivier Hagolle. The tool led to a publication which is provided as a reference. diff --git a/all_run_alcd.py b/all_run_alcd.py index 342d860..01fb2f5 100644 --- a/all_run_alcd.py +++ b/all_run_alcd.py @@ -27,16 +27,13 @@ """ -import sys import os import os.path as op import json import shutil import argparse import tempfile -import glob -import OTB_workflow import masks_preprocessing import layers_creation import L1C_band_composition @@ -46,11 +43,11 @@ import confidence_map_exploitation -def initialization_global_parameters(main_dir, raw_img_name, location, current_date, clear_date): +def initialization_global_parameters(main_dir, raw_img_name, location, current_date, clear_date, unique_image): ''' To initialize the path and name in the JSON file Must be done at the very beggining ''' - paths_configuration = json.load(open(op.join('..', 'paths_configuration.json'))) + paths_configuration = json.load(open(op.join('parameters_files', 'paths_configuration.json'))) json_path = op.join('parameters_files', 'global_parameters.json') jsonFile = open(json_path, "r") # Open the JSON file for reading @@ -64,6 +61,7 @@ def initialization_global_parameters(main_dir, raw_img_name, location, current_d data["user_choices"]["clear_date"] = clear_date data["user_choices"]["location"] = location data["user_choices"]["tile"] = paths_configuration["tile_location"][location] + data["user_choices"]["unique_image"] = unique_image # Save our changes to JSON file jsonFile = open(json_path, "w+") @@ -133,32 +131,38 @@ def invitation_to_copy(global_parameters, first_iteration=False): source=source_dir, destination=dest_dir)) -def run_all(part, 
first_iteration=False, location=None, wanted_date=None, clear_date=None, k_fold_step=None, k_fold_dir=None, force=False): +def run_all(part, first_iteration=False, location=None, wanted_date=None, clear_date=None, unique_image=False, k_fold_step=None, k_fold_dir=None, force=False): + if part == 1: # Define the main parameters for the algorithm # If all is filled, will update the JSON file - if location != None and wanted_date != None and clear_date != None: - paths_configuration = json.load(open(op.join('..', 'paths_configuration.json'))) + if location != None and wanted_date != None and (clear_date != None or unique_image != None): + paths_configuration = json.load(open(op.join('parameters_files', 'paths_configuration.json'))) Data_PCC_dir = paths_configuration["data_paths"]["data_pcc"] Data_ALCD_dir = paths_configuration["data_paths"]["data_alcd"] tile = paths_configuration["tile_location"][location] - if find_directory_names.is_valid_date(location, wanted_date): - current_date = wanted_date + if not str2bool(unique_image): + if find_directory_names.is_valid_date(location, wanted_date): + current_date = wanted_date + else: + print('Error: please enter a valid wanted date') + raise NameError('Invalid wanted date') + + if not find_directory_names.is_valid_date(location, clear_date): + print('Error: please enter a valid cloud free date') + raise NameError('Invalid cloud free date') else: - print('Error: please enter a valid wanted date') - raise NameError('Invalid wanted date') + current_date = wanted_date + clear_date = "" - if not find_directory_names.is_valid_date(location, clear_date): - print('Error: please enter a valid cloud free date') - raise NameError('Invalid cloud free date') main_dir = op.join(Data_ALCD_dir, (location + '_' + tile + '_' + current_date)) raw_img_name = location + "_bands.tif" # Initialize the parameters with them initialization_global_parameters( - main_dir, raw_img_name, location, current_date, clear_date) + main_dir, raw_img_name, 
location, current_date, clear_date, unique_image) # Load the parameters global_parameters = json.load( @@ -166,7 +170,7 @@ def run_all(part, first_iteration=False, location=None, wanted_date=None, clear_ if first_iteration == True: # Create the directories - OTB_workflow.create_directories(global_parameters) + OTB_wf.create_directories(global_parameters) # Copy the global_parameters files to save it src = op.join('parameters_files', 'global_parameters.json') @@ -174,9 +178,14 @@ def run_all(part, first_iteration=False, location=None, wanted_date=None, clear_ ["main_dir"], 'In_data', 'used_global_parameters.json') shutil.copyfile(src, dst) + if not str2bool(unique_image): + heavy = True + else: + heavy = False + # Create the images .tif and .jp2, i.e. the features L1C_band_composition.create_image_compositions( - global_parameters, location, current_date, heavy=True, force=force) + global_parameters, location, current_date, unique_image, heavy, force=force) # Create the empty layers layers_creation.create_all_classes_empty_layers(global_parameters, force=force) @@ -216,19 +225,24 @@ def run_all(part, first_iteration=False, location=None, wanted_date=None, clear_ # Train the model and classify the image OTB_wf.train_model(global_parameters, shell=False, proceed=True) additional_name = '' - OTB_wf.image_classification(global_parameters, shell=False, + appClassif = OTB_wf.image_classification(global_parameters, shell=False, proceed=True, additional_name=additional_name) - OTB_wf.confidence_map_viz(global_parameters, additional_name=additional_name) + + if str2bool(global_parameters["postprocessing"]["confidence_map"]): + OTB_wf.confidence_map_viz(global_parameters, appClassif.GetParameterOutputImage("confmap"), + additional_name=additional_name) # regularization_radius in pixel regularization_radius = int( global_parameters["training_parameters"]["regularization_radius"]) OTB_wf.classification_regularization( - global_parameters, proceed=True, 
radius=regularization_radius) + global_parameters, appClassif.GetParameterOutputImage("out"), proceed=True, radius=regularization_radius) elif part == 5: # Compute some metrics OTB_wf.compute_mat_conf(global_parameters) - OTB_wf.fancy_classif_viz(global_parameters, proceed=True) + + if not str2bool(global_parameters["user_choices"]["unique_image"]): + OTB_wf.fancy_classif_viz(global_parameters, proceed=True) try: confidence_map_exploitation.compute_all_confidence_stats(global_parameters) confidence_map_exploitation.plot_confidence_evolution(global_parameters) @@ -241,7 +255,8 @@ def run_all(part, first_iteration=False, location=None, wanted_date=None, clear_ elif part == 6: # Create the contours for a better visualisation - OTB_wf.create_contour_from_labeled(global_parameters, proceed=True) + if not str2bool(global_parameters["user_choices"]["unique_image"]): + OTB_wf.create_contour_from_labeled(global_parameters, proceed=True) def str2bool(v): @@ -269,7 +284,9 @@ def main(): parser.add_argument('-d', action='store', default=None, dest='wanted_date', help='Date, The desired date to process (e.g. 20170702)') parser.add_argument('-c', action='store', default=None, dest='clear_date', - help='Date, The nearest clear date (e.g. 20170704)') + help = 'Date, The nearest clear date (e.g. 
20170704)') + parser.add_argument('-u', action='store', default='false', + dest='unique_image',help='Bool, Only one image processed') parser.add_argument('-dates', action='store', default='false', dest='get_dates', help='Bool, Get the available dates') parser.add_argument('-kfold', action='store', default='false', @@ -301,6 +318,7 @@ def main(): wanted_date = results.wanted_date clear_date = results.clear_date + unique_image = results.unique_image force = str2bool(results.force) @@ -345,7 +363,7 @@ def main(): if user_input == 0: run_all(part=1, first_iteration=first_iteration, location=location, - wanted_date=wanted_date, clear_date=clear_date, force=force) + wanted_date=wanted_date, clear_date=clear_date, unique_image=unique_image, force=force) elif user_input == 1: run_all(part=2, first_iteration=first_iteration, force=force) run_all(part=3, first_iteration=first_iteration, force=force) diff --git a/confidence_map_exploitation.py b/confidence_map_exploitation.py index 0356833..d9015d1 100644 --- a/confidence_map_exploitation.py +++ b/confidence_map_exploitation.py @@ -38,7 +38,7 @@ import merge_shapefiles -def confidence_map_change(in_tif, out_tif, median_radius=5): +def confidence_map_change(app_confidence_map, out_tif, median_radius=5): ''' Change the confidence map to an enhanced one ''' @@ -47,7 +47,7 @@ def confidence_map_change(in_tif, out_tif, median_radius=5): median_radius = median_radius+1 MedianFilter = otbApplication.Registry.CreateApplication("BandMathX") - MedianFilter.SetParameterStringList("il", [str(in_tif)]) + MedianFilter.AddImageToParameterInputImageList("il", app_confidence_map) MedianFilter.SetParameterString("out", str(out_tif)) MedianFilter.SetParameterString( "exp", "(median(im1b1N{}x{}))".format(median_radius, median_radius)) diff --git a/expand_point_region.py b/expand_point_region.py index b61df15..2cbaf6e 100644 --- a/expand_point_region.py +++ b/expand_point_region.py @@ -32,7 +32,48 @@ import numpy as np -def create_squares(in_shp, 
out_shp, max_dist_X, max_dist_Y): +def compute_polygon(point, dist_X, dist_Y, out_layer): + ''' + For each point, a square is computed from dist_X + and dist_Y in each direction. + ''' + + ingeom = point.GetGeometryRef() + + Xpoint = ingeom.GetX(0) + Ypoint = ingeom.GetY(0) + + # definition of the square sides + left = Xpoint-dist_X + right = Xpoint + dist_X + top = Ypoint+dist_Y + bottom = Ypoint-dist_Y + + border = ogr.Geometry(ogr.wkbLinearRing) + # 4 corners, needs to be closed with the 5th point + border.AddPoint(left, top) + border.AddPoint(right, top) + border.AddPoint(right, bottom) + border.AddPoint(left, bottom) + border.AddPoint(left, top) + + # create the polygon + poly = ogr.Geometry(ogr.wkbPolygon) + poly.AddGeometry(border) + + # assign the class and expand status + featureDefn = out_layer.GetLayerDefn() + feature = ogr.Feature(featureDefn) + feature.SetGeometry(poly) + + current_class = point.GetField("class") + feature.SetField("class", current_class) + expand_status = point.GetField("expand") + feature.SetField("expand", expand_status) + out_layer.CreateFeature(feature) + + +def create_squares(in_shp, out_shp, max_dist_X, max_dist_Y, half_res_X, half_res_Y): ''' Create a neighbourhoud around all the points in a shapefile For each point, a square around it is created, from -max_dist to +max_dist @@ -68,39 +109,22 @@ def create_squares(in_shp, out_shp, max_dist_X, max_dist_Y): outLayer.CreateField(classField) print('{} squares will be created'.format(len(inLayer))) + # Add an expand field + expandField = ogr.FieldDefn("expand", ogr.OFTInteger) + expandField.SetSubType(ogr.OFSTBoolean) + outLayer.CreateField(expandField) + # Create the feature and set values for point in inLayer: current_class = point.GetField("class") - if current_class != None: - ingeom = point.GetGeometryRef() - - Xpoint = ingeom.GetX(0) - Ypoint = ingeom.GetY(0) - - # definition of the square sides - left = Xpoint-max_dist_X - right = Xpoint + max_dist_X - top = Ypoint+max_dist_Y 
- bottom = Ypoint-max_dist_Y - - border = ogr.Geometry(ogr.wkbLinearRing) - # 4 corners, needs to be closed with the 5th point - border.AddPoint(left, top) - border.AddPoint(right, top) - border.AddPoint(right, bottom) - border.AddPoint(left, bottom) - border.AddPoint(left, top) - - # create the polygon - poly = ogr.Geometry(ogr.wkbPolygon) - poly.AddGeometry(border) - - # assign the class - featureDefn = outLayer.GetLayerDefn() - feature = ogr.Feature(featureDefn) - feature.SetGeometry(poly) - feature.SetField("class", current_class) - outLayer.CreateFeature(feature) + expand_status = point.GetField("expand") + + if current_class != None and expand_status == True: + compute_polygon(point, max_dist_X, max_dist_Y, outLayer) + + # Add points with no expand computed + elif current_class != None and expand_status == False: + compute_polygon(point, half_res_X, half_res_Y, outLayer) # Close DataSource inDataSource.Destroy() diff --git a/find_directory_names.py b/find_directory_names.py index e659096..35566ee 100644 --- a/find_directory_names.py +++ b/find_directory_names.py @@ -38,7 +38,7 @@ def get_all_dates(location): ''' Get all dates for a given location ''' - paths_configuration = json.load(open(op.join('..', 'paths_configuration.json'))) + paths_configuration = json.load(open(op.join('parameters_files', 'paths_configuration.json'))) L1C_dir = paths_configuration["global_chains_paths"]["L1C"] location_dir = op.join(L1C_dir, location) @@ -107,7 +107,7 @@ def get_L1C_dir(location, wanted_date, display=True): Get the path of the L1C directory If the date is not valid, returns the closest one (after) ''' - paths_configuration = json.load(open(op.join('..', 'paths_configuration.json'))) + paths_configuration = json.load(open(op.join('parameters_files', 'paths_configuration.json'))) L1C_dir = paths_configuration["global_chains_paths"]["L1C"] location_dir = op.join(L1C_dir, location) diff --git a/layers_creation.py b/layers_creation.py index 7bddcb1..9c30922 100644 --- 
a/layers_creation.py +++ b/layers_creation.py @@ -66,6 +66,11 @@ def empty_shapefile_creation(in_tif, out_shp_list, geometry_type='point'): classField = ogr.FieldDefn("class", ogr.OFTInteger) outLayer.CreateField(classField) + # Add an expand field + expandField = ogr.FieldDefn("expand", ogr.OFTInteger) + expandField.SetSubType(ogr.OFSTBoolean) + outLayer.CreateField(expandField) + # Close DataSource outDataSource.Destroy() diff --git a/masks_preprocessing.py b/masks_preprocessing.py index 1f7d9ad..1eac390 100644 --- a/masks_preprocessing.py +++ b/masks_preprocessing.py @@ -72,10 +72,14 @@ def split_and_augment(global_parameters, k_fold_step=None, k_fold_dir=None): max_dist_X = float(global_parameters["training_parameters"]["expansion_distance"]) max_dist_Y = float(global_parameters["training_parameters"]["expansion_distance"]) + half_res_X = float(global_parameters["features"]["resolution"])/2.0 + half_res_Y = float(global_parameters["features"]["resolution"])/2.0 + # the employed method is the squares one - expand_point_region.create_squares(training_shp, training_shp_extended, max_dist_X, max_dist_Y) expand_point_region.create_squares( - validation_shp, validation_shp_extended, max_dist_X, max_dist_Y) + training_shp, training_shp_extended, max_dist_X, max_dist_Y, half_res_X, half_res_Y) + expand_point_region.create_squares( + validation_shp, validation_shp_extended, max_dist_X, max_dist_Y, half_res_X, half_res_Y) def load_kfold(train_shp, validation_shp, k_fold_step, k_fold_dir): @@ -146,17 +150,19 @@ def masks_preprocess(global_parameters, k_fold_step=None, k_fold_dir=None): layers_to_merge = [] layers_classes = [] + layers_expand = [] # append the classes names and numbers for mask_name, mask_values in global_parameters["masks"].iteritems(): layers_to_merge.append(op.join(main_dir, 'In_data', 'Masks', mask_values["shp"])) layers_classes.append(mask_values["class"]) + layers_expand.append(mask_values["expand"]) merged_layers = op.join(main_dir, 'Intermediate', 
global_parameters["general"]["merged_layers"]) print(' Merge the classes shapefiles into one') merge_shapefiles.merge_shapefiles(in_shp_list=layers_to_merge, - class_list=layers_classes, out_shp=merged_layers) + class_list=layers_classes, expand_list = layers_expand, out_shp=merged_layers) print('Done') if k_fold_step != None and k_fold_dir != None: diff --git a/merge_shapefiles.py b/merge_shapefiles.py index cc4870f..099418a 100644 --- a/merge_shapefiles.py +++ b/merge_shapefiles.py @@ -27,9 +27,10 @@ import os import os.path as op import ogr +from distutils.util import strtobool -def merge_shapefiles(in_shp_list, class_list, out_shp): +def merge_shapefiles(in_shp_list, class_list, expand_list, out_shp): ''' Create a merged shapefile The class_list should be in the same order than the in_shp_list @@ -38,6 +39,7 @@ def merge_shapefiles(in_shp_list, class_list, out_shp): for k in range(len(in_shp_list)): in_shp = in_shp_list[k] current_class = class_list[k] + expand_class = expand_list[k] inDriver = ogr.GetDriverByName("ESRI Shapefile") inDataSource = inDriver.Open(in_shp, 0) @@ -60,6 +62,11 @@ def merge_shapefiles(in_shp_list, class_list, out_shp): classField = ogr.FieldDefn("class", ogr.OFTInteger) outLayer.CreateField(classField) + # Add an expand field + expandField = ogr.FieldDefn("expand", ogr.OFTInteger) + expandField.SetSubType(ogr.OFSTBoolean) + outLayer.CreateField(expandField) + # Create the feature and set values for point in inLayer: ingeom = point.GetGeometryRef() @@ -68,6 +75,7 @@ def merge_shapefiles(in_shp_list, class_list, out_shp): feature = ogr.Feature(featureDefn) feature.SetGeometry(ingeom) feature.SetField("class", current_class) + feature.SetField("expand", strtobool(expand_class)) outLayer.CreateFeature(feature) # Close DataSource diff --git a/metrics_exploitation.py b/metrics_exploitation.py index f7488ee..f4192b8 100644 --- a/metrics_exploitation.py +++ b/metrics_exploitation.py @@ -315,7 +315,7 @@ def 
retrieve_Kfold_data(global_parameters, metrics_plotting=False, location='', stats on all the runs ''' if location != '' and date != '': - paths_configuration = json.load(open(op.join('..', 'paths_configuration.json'))) + paths_configuration = json.load(open(op.join('parameters_files', 'paths_configuration.json'))) Data_ALCD_dir = paths_configuration["data_paths"]["data_alcd"] main_dir = glob.glob(op.join(Data_ALCD_dir, '{}_*_{}'.format(location, date)))[0] @@ -408,7 +408,7 @@ def retrieve_Kfold_data(global_parameters, metrics_plotting=False, location='', def load_previous_global_parameters(location, date): - paths_configuration = json.load(open(op.join('..', 'paths_configuration.json'))) + paths_configuration = json.load(open(op.join('parameters_files', 'paths_configuration.json'))) Data_ALCD_dir = paths_configuration["data_paths"]["data_alcd"] main_dir_path = glob.glob(op.join(Data_ALCD_dir, '{}_*_{}'.format(location, date)))[0] diff --git a/parameters_files/global_parameters.json b/parameters_files/global_parameters.json index 0b91ca9..d6c83ba 100644 --- a/parameters_files/global_parameters.json +++ b/parameters_files/global_parameters.json @@ -6,7 +6,7 @@ "otb": "otb_table.txt" }, "features": { - "DTM": "True", + "DTM": "True", "original_bands": [ "1", "2", @@ -21,23 +21,25 @@ "11", "12" ], - "ratios": [], + "ratios": [], + "resolution": "60", + "slope": "False", "special_indices": [ "NDVI", "NDWI" ], "textures": "False", "time_difference_bands": [ - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8", - "9", - "11", + "1", + "2", + "3", + "4", + "5", + "6", + "7", + "8", + "9", + "11", "12" ] }, @@ -62,52 +64,72 @@ }, "masks": { "background": { - "class": "1", + "class": "1", + "expand": "True", "shp": "background.shp" }, "clouds_shadows": { - "class": "4", + "class": "4", + "expand": "True", "shp": "clouds_shadows.shp" }, "high_clouds": { - "class": "3", + "class": "3", + "expand": "True", "shp": "high_clouds.shp" }, "land": { - "class": "5", + "class": "5", 
+ "expand": "True", "shp": "land.shp" - }, + }, + "land_1pixel": { + "class": "5", + "expand": "False", + "shp": "land_1pixel.shp" + }, "low_clouds": { - "class": "2", + "class": "2", + "expand": "True", "shp": "low_clouds.shp" }, "snow": { - "class": "7", + "class": "7", + "expand": "True", "shp": "snow.shp" }, "water": { - "class": "6", + "class": "6", + "expand": "True", "shp": "water.shp" + }, + "water_1pixel": { + "class": "6", + "expand": "False", + "shp": "water_1pixel.shp" } }, "postprocessing": { - "binary_confusion_matrix": "bin_confusion_matrix.csv", - "confusion_matrix": "confusion_matrix.csv", + "binary_confusion_matrix": "bin_confusion_matrix.csv", + "confusion_matrix": "confusion_matrix.csv", + "confidence_map": "True", "model_metrics": "model_metrics.csv" }, "training_parameters": { "Kfold": "10", "dilatation_radius": "2", - "expansion_distance": "100", + "expansion_distance": "100", "regularization_radius": "1", "training_proportion": "0.9" }, "user_choices": { - "clear_date": "20170112", - "current_date": "20170102", - "location": "Marrakech", - "main_dir": "/mnt/data/SENTINEL2/ALCD/ALCD/Marrakech_29RPQ_20170102", - "raw_img": "Marrakech_bands.tif", + "clear_date": "20170112", + "current_date": "20170102", + "unique_image": "False", + "location": "Marrakech", + "main_dir": "/mnt/data/SENTINEL2/ALCD/ALCD/Marrakech_29RPQ_20170102", + "raw_img": "Marrakech_bands.tif", + "slope": "", "tile": "29RPQ" } -} \ No newline at end of file +} diff --git a/parameters_files/global_parameters.json~ b/parameters_files/global_parameters.json~ deleted file mode 100644 index cfc55dd..0000000 --- a/parameters_files/global_parameters.json~ +++ /dev/null @@ -1,113 +0,0 @@ -{ - "classification": { - "method": "rf" - }, - "color_tables": { - "otb": "otb_table.txt" - }, - "features": { - "DTM": "True", - "original_bands": [ - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8", - "9", - "10", - "11", - "12" - ], - "ratios": [], - "special_indices": [ - "NDVI", - "NDWI" - 
], - "textures": "False", - "time_difference_bands": [ - "1", - "2", - "3", - "4", - "5", - "6", - "7", - "8", - "9", - "11", - "12" - ] - }, - "general": { - "class_stats": "class_stats.xml", - "img_labeled": "labeled_img.tif", - "img_labeled_regularized": "labeled_img_regular.tif", - "img_stats": "img_stats.xml", - "merged_layers": "merged.shp", - "no_data_mask": "no_data.shp", - "training_samples_extracted": "training_samples_extracted.sqlite", - "training_samples_location": "training_samples_location.sqlite", - "training_sampling": "smallest", - "training_shp": "train_points.shp", - "training_shp_extended": "train_points_ext.shp", - "validation_shp": "validation_points.shp", - "validation_shp_extended": "validation_points_ext.shp" - }, - "local_paths": { - "copy_folder": "/home/baetensl/Documents/Local_masks_modification", - "current_server": "baetensl@s2calc.cesbio.cnes.fr:/" - }, - "masks": { - "background": { - "class": "1", - "shp": "background.shp" - }, - "clouds_shadows": { - "class": "4", - "shp": "clouds_shadows.shp" - }, - "high_clouds": { - "class": "3", - "shp": "high_clouds.shp" - }, - "land": { - "class": "5", - "shp": "land.shp" - }, - "low_clouds": { - "class": "2", - "shp": "low_clouds.shp" - }, - "snow": { - "class": "7", - "shp": "snow.shp" - }, - "water": { - "class": "6", - "shp": "water.shp" - } - }, - "postprocessing": { - "binary_confusion_matrix": "bin_confusion_matrix.csv", - "confusion_matrix": "confusion_matrix.csv", - "model_metrics": "model_metrics.csv" - }, - "training_parameters": { - "Kfold": "10", - "dilatation_radius": "2", - "expansion_distance": "100", - "regularization_radius": "1", - "training_proportion": "0.9" - }, - "user_choices": { - "clear_date": "20171018", - "current_date": "20171013", - "location": "Mongu", - "main_dir": "/mnt/data/SENTINEL2/ALCD/ALCD/Mongu_34LGJ_20171013", - "raw_img": "Mongu_bands.tif", - "tile": "34LGJ" - } -} \ No newline at end of file diff --git 
a/parameters_files/global_parameters_example.json b/parameters_files/global_parameters_example.json index d4d6ddc..f368ad0 100644 --- a/parameters_files/global_parameters_example.json +++ b/parameters_files/global_parameters_example.json @@ -22,6 +22,8 @@ "12" ], "ratios": [], + "resolution": "60", + "slope": "False", "special_indices": [ "NDVI", "NDWI" @@ -62,37 +64,55 @@ }, "masks": { "background": { - "class": "1", + "class": "1", + "expand": "True", "shp": "background.shp" }, "clouds_shadows": { - "class": "4", + "class": "4", + "expand": "True", "shp": "clouds_shadows.shp" }, "high_clouds": { - "class": "3", + "class": "3", + "expand": "True", "shp": "high_clouds.shp" }, "land": { - "class": "5", + "class": "5", + "expand": "True", "shp": "land.shp" - }, + }, + "land_1pixel": { + "class": "5", + "expand": "False", + "shp": "land_1pixel.shp" + }, "low_clouds": { - "class": "2", + "class": "2", + "expand": "True", "shp": "low_clouds.shp" }, "snow": { - "class": "7", + "class": "7", + "expand": "True", "shp": "snow.shp" }, "water": { - "class": "6", + "class": "6", + "expand": "True", "shp": "water.shp" + }, + "water_1pixel": { + "class": "6", + "expand": "False", + "shp": "water_1pixel.shp" } }, "postprocessing": { "binary_confusion_matrix": "bin_confusion_matrix.csv", "confusion_matrix": "confusion_matrix.csv", + "confidence_map": "True", "model_metrics": "model_metrics.csv" }, "training_parameters": { @@ -105,9 +125,10 @@ "user_choices": { "clear_date": "20160427", "current_date": "20160417", + "unique_image": "False", "location": "Marrakech", "main_dir": "/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Marrakech_29RPQ_20160417", "raw_img": "Marrakech_bands.tif", "tile": "29RPQ" } -} \ No newline at end of file +} diff --git a/parameters_files/paths_configuration.json b/parameters_files/paths_configuration.json new file mode 100644 index 0000000..9d11212 --- /dev/null +++ b/parameters_files/paths_configuration.json @@ -0,0 +1,42 @@ +{ + 
"global_chains_paths": { + "L1C": "/home/mp/nicolasg/hydro_ref/MERGE/test/input", + "maja": "/mnt/data/SENTINEL2/L2A_MAJA", + "sen2cor": "/mnt/data/SENTINEL2/L2A_SEN2COR", + "fmask": "/mnt/data/home/baetensl/Programs/Fmask4_output", + "fmask3": "/mnt/data/home/baetensl/Programs/Output_fmask", + "DTM_input": "/mnt/data/home/baetensl/DTM/original", + "DTM_resized": "/mnt/data/home/baetensl/DTM/resized" + }, + "data_paths": { + "data_alcd": "", + "data_pcc": "/mnt/data/home/baetensl/clouds_detection_git/Data_PCC" + }, + "tile_location": { + "Alsace": "32ULU", + "Camargue": "31TFJ", + "Briere": "30TWT", + "Bretagne": "30UXU", + "Alpes": "31TGL", + "Bordeaux": "30TXQ", + "Chateauroux": "31TCM", + "Orleans": "31UDP", + "Arles": "31TFJ", + "Gobabeb": "33KWP", + "Mongu": "34LGJ", + "Pretoria": "35JPM", + "RailroadValley": "11SPC", + "29RPQ_20160417": "29RPQ", + "32TMR_20160323": "32TMR", + "32TNR_20161108": "32TNR", + "33UUU_20160817": "33UUU", + "35VLJ_20160831": "35VLJ", + "37PGQ_20151206": "37PGQ", + "49JGL_20160126": "49JGL", + "49JHM_20160126": "49JHM", + "Marrakech":"29RPQ", + "Ispra":"32TMR", + "Munich":"32UPU", + "Alta_Floresta_Brazil":"21LWK" + } +} diff --git a/synthese_alcd_runs.py b/synthese_alcd_runs.py index 01bfacd..762281c 100644 --- a/synthese_alcd_runs.py +++ b/synthese_alcd_runs.py @@ -199,7 +199,7 @@ def mean_confidence(confidence_tif): def main(): - paths_configuration = json.load(open(op.join('..', 'paths_configuration.json'))) + paths_configuration = json.load(open(op.join('parameters_files', 'paths_configuration.json'))) global_parameters = json.load(open(op.join('parameters_files', 'global_parameters.json'))) locations = ['Arles', 'Orleans', 'Gobabeb', 'RailroadValley',