Commit 0dbfc1f003a3c7533e8e5060d1f22cc62831f8c5

Authored by Louis Baetens
1 parent 92e2ee13
Exists in master

Remove the unused lines in main()

ALCD/L1C_band_composition.py
... ... @@ -33,9 +33,7 @@ def create_composit_band(bands_full_paths, out_tif, resolution = 60, composit_ty
33 33 BandMathX = otbApplication.Registry.CreateApplication("BandMathX")
34 34 BandMathX.SetParameterStringList("il", temp_bands_full_paths)
35 35 BandMathX.SetParameterString("out", str(out_tif))
36   - # TODO : verify that it works with the additional 0.01
37   - # Should avoid having NaN in the result
38   - #~ BandMathX.SetParameterString("exp", "5000*(im1b1-im2b1)/(0.01+im1b1+im2b1)")
  36 + # The 0.01 avoids having NaN in the result
39 37 BandMathX.SetParameterString("exp", "(im1b1-im2b1)/(0.01+im1b1+im2b1)")
40 38 BandMathX.UpdateParameters()
41 39 BandMathX.ExecuteAndWriteOutput()
... ... @@ -425,9 +423,8 @@ def create_no_data_tif(global_parameters, out_tif, dilation_radius = 10):
425 423 current_dir, current_band_prefix, current_date = find_directory_names.get_L1C_dir(location, current_date, display = False)
426 424 clear_dir, clear_band_prefix, clear_date = find_directory_names.get_L1C_dir(location, clear_date, display = False)
427 425  
428   - # Band number, the 10 is 60m resolution, change it if
  426 + # Band number, 1 is the 20m resolution; change it if
429 427 # other resolution is wanted
430   - #~ band_num_str = '{:02d}'.format(10)
431 428 band_num_str = '{:02d}'.format(1)
432 429  
433 430 cloudy_band = glob.glob(op.join(current_dir, (current_band_prefix + band_num_str + '.jp2')))[0]
... ... @@ -466,77 +463,14 @@ def str2bool(v):
466 463 def main():
467 464 global_parameters = json.load(open(op.join('parameters_files','global_parameters.json')))
468 465  
469   - print(global_parameters["features"]["original_bands"])
470   - original_bands = [int(band) for band in global_parameters["features"]["original_bands"]]
471   - time_difference_bands = [int(band) for band in global_parameters["features"]["time_difference_bands"]]
472   -
473   - use_DTM = str2bool(global_parameters["features"]["DTM"])
474   - create_textures = str2bool(global_parameters["features"]["textures"])
475   - print(use_DTM == True)
476   -
477   -
478   -
479   -
480   -
481   -
482   - return
483   -
484 466 out_tif = 'tmp/tmp_tif.tif'
485 467 create_no_data_tif(global_parameters, out_tif, resolution = 60)
486 468  
487   - return
488   - location = 'Orleans'
489   - out_band = 'otrlean.tif'
490   - dtm_addition(location, out_band)
491   -
492   - return
493   -
494   -
495   - #~ create_image_compositions(global_parameters)
496   -
497   - #~ day = '20'
498   - #~ month = '05'
499   - #~ year = '2017'
500 469 current_date = '20170520'
501 470 location = 'Pretoria'
502 471  
503 472 create_image_compositions(global_parameters, location, current_date, heavy = False)
504   -
505 473 return
506   -
507   - bands_dir, band_prefix, date = find_directory_names.get_L1C_dir(location, current_date, display = True)
508   -
509   - bands_num = [8,10]
510   -
511   - # get the full paths
512   - bands_full_paths = []
513   - for band in bands_num:
514   - bands_full_paths.append(str(op.join(bands_dir, band_prefix)+ '{:02d}'.format(band)+'.jp2'))
515   -
516   - out_tif = 'tmp/NDVI.tif'
517   -
518   - #~ create_composit_band(bands_full_paths, out_tif, composit_type = 'ND')
519   -
520   - in_bands_dir = bands_dir
521   -
522   - create_specific_indices(in_bands_dir, out_tif, indice_name = 'NDVI', resolution = 60)
523   -
524   -
525   - return
526   -
527   - in_bands = ['/mnt/data/home/baetensl/classification_clouds/Data/TestBands/B02.jp2', '/mnt/data/home/baetensl/classification_clouds/Data/TestBands/B03.jp2']
528   -
529   - in_band = '/mnt/data/home/baetensl/classification_clouds/Data/TestBands/compos.jp2'
530   - out_band = '/mnt/data/home/baetensl/classification_clouds/Data/TestBands/compos_res.jp2'
531   -
532   - pixelresX = 100
533   - pixelresY = 300
534   - resize_band(in_band, out_band, pixelresX, pixelresY)
535   -
536   - pixelX = 500
537   - pixelY = 500
538   -
539   - #~ compose_bands_gdal(in_bands, out_band, pixelX, pixelY)
540 474  
541 475  
542 476 if __name__=='__main__':
... ...
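For reference, the expression kept above, (im1b1-im2b1)/(0.01+im1b1+im2b1), adds a small 0.01 offset to the denominator so that pixels where both bands are zero do not produce NaN. A minimal NumPy sketch of the same arithmetic, with synthetic arrays rather than the OTB BandMathX pipeline the script actually uses:

    import numpy as np

    def normalized_difference(band_a, band_b, eps=0.01):
        # eps in the denominator avoids 0/0 (NaN) where both bands are zero,
        # mirroring the BandMathX expression "(im1b1-im2b1)/(0.01+im1b1+im2b1)"
        a = band_a.astype(np.float64)
        b = band_b.astype(np.float64)
        return (a - b) / (eps + a + b)

    # synthetic reflectances; the zero pixels stay finite instead of NaN
    b1 = np.array([[0.0, 0.4], [0.3, 0.0]])
    b2 = np.array([[0.0, 0.1], [0.2, 0.0]])
    print(normalized_difference(b1, b2))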
ALCD/OTB_workflow.py
... ... @@ -423,47 +423,11 @@ def create_contour_from_labeled(global_parameters, proceed=True):
423 423  
424 424  
425 425 def main():
426   -
427 426 global_parameters = json.load(open(op.join('parameters_files','global_parameters.json')))
428   -
429   -
430 427 train_model(global_parameters, shell=False, proceed=True)
431 428  
432 429 return
433 430  
434   -
435   - main_dir = global_parameters["user_choices"]["main_dir"]
436   - raw_img = op.join(main_dir, 'In_data', 'Image', global_parameters["user_choices"]["raw_img"])
437   -
438   - validation_shp = op.join(main_dir, 'Intermediate', global_parameters["general"]["validation_shp_extended"])
439   - training_shp = op.join(main_dir, 'Intermediate', global_parameters["general"]["training_shp_extended"])
440   - img_stats = op.join(main_dir, 'Statistics', global_parameters["general"]["img_stats"])
441   - class_stats = op.join(main_dir, 'Statistics', global_parameters["general"]["class_stats"])
442   - training_samples_location = op.join(main_dir, 'Samples', global_parameters["general"]["training_samples_location"])
443   - training_samples_extracted = op.join(main_dir, 'Samples', global_parameters["general"]["training_samples_extracted"])
444   - model = op.join(main_dir, 'Models', ('model.'+global_parameters["classification"]["method"]))
445   - img_labeled = op.join(main_dir, 'Out', global_parameters["general"]["img_labeled"])
446   -
447   -
448   -
449   -
450   -
451   - create_directories(global_parameters)
452   -
453   - compute_image_stats(global_parameters)
454   -
455   - proceed = True
456   - compute_samples_stats(global_parameters, proceed=True)
457   - select_samples(global_parameters, strategy="constant_8000", proceed=proceed)
458   - extract_samples(global_parameters, proceed=proceed)
459   -
460   - train_model(global_parameters, shell=True, proceed=True)
461   - additional_name = ''
462   - image_classification(global_parameters, shell=True, proceed=True, additional_name = additional_name)
463   -
464   - compute_mat_conf(global_parameters)
465   - fancy_classif_viz(global_parameters, proceed=True)
466   -
467 431  
468 432 if __name__ == '__main__':
469 433 main()
... ...
ALCD/confidence_map_exploitation.py
... ... @@ -254,8 +254,6 @@ def plot_confidence_evolution(global_parameters):
254 254 save_nb = range(len(save_dirs))
255 255 plt.plot(save_nb, all_pixels_mean, color='g', linestyle='-', marker='o', label = 'Of all pixels')
256 256 plt.plot(save_nb, all_samples_mean, color='b', linestyle='-', marker='o', label = 'Of the samples')
257   - #~ plt.plot(save_nb, wellclassified_samples_mean, color='g', linestyle='--', marker='o', label = 'Of the wellclassified samples')
258   - #~ plt.plot(save_nb, misclassified_samples_mean, color='r', linestyle='--', marker='o', label = 'Of the misclassified samples')
259 257  
260 258 plt.legend(bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
261 259 location = global_parameters["user_choices"]["location"]
... ... @@ -264,7 +262,6 @@ def plot_confidence_evolution(global_parameters):
264 262 plt.xlabel('Iteration')
265 263 plt.ylabel('Mean confidence')
266 264  
267   - #~ plt.show()
268 265 out_fig = op.join(main_dir, 'Statistics', 'confidence_evolution.png')
269 266 plt.savefig(out_fig, bbox_inches='tight')
270 267 plt.close()
... ... @@ -326,48 +323,10 @@ def main():
326 323 plot_samples_evolution(global_parameters)
327 324  
328 325 return
329   - #~ compute_all_confidence_stats(global_parameters)
330   -
331   - #~ return
332   -
333   - #~ main_dir = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20171002/'
334   - #~ in_tif = main_dir + 'In_data/Image/Arles_bands.tif'
335   - #~ out_tif = main_dir + 'In_data/Image/to_del_extract.tif'
336   - #~ in_shp = []
337   -
338   - #~ in_shp.append(main_dir + 'In_data/Masks/water.shp')
339   - #~ in_shp.append(main_dir + 'In_data/Masks/land.shp')
340   - #~ in_shp.append(main_dir + 'In_data/Masks/low_clouds.shp')
341   - #~ in_shp = (main_dir + 'In_data/Masks')
342   -
343   - #~ shapefile_rasterization(in_tif, in_shp, out_tif)
344   -
345   - #~ return
346   -
347   - #~ global_parameters = json.load(open(op.join('parameters_files','global_parameters.json')))
348   -
349   - #~ modes = ['all','all_classified_samples','well_classified_samples','misclassified_samples']
350   - #~ samples_sets = ['both', 'train', 'validation']
351   -
352   - #~ mode = modes[3]
353   - #~ samples_set = samples_sets[1]
354   -
355   - #~ for mode in ['all','all_classified_samples','well_classified_samples','misclassified_samples']:
356   - #~ for samples_set in ['both'] :
357   -
358   - #~ confidence_map_mean(global_parameters, mode=mode, samples_set=samples_set, extended = False)
359   -
360   - #~ return
361   -
362 326  
363   -
364 327 in_tif = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20170917/Out/confidence.tif'
365   - out_tif = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20170917/Out/confidence_modified_med.tif'
366   - cutoff_threshold = 0.75
367   - expected_pixels_nb = 10
368   - #~ confidence_map_change(in_tif, out_tif, cutoff_threshold, expected_pixels_nb)
  328 + out_tif = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20170917/Out/confidence_modified_med.tif'
369 329 confidence_map_change(in_tif, out_tif, median_radius = 9)
370   - #~ confidence_map_change_median(in_tif, out_tif, median_radius = 7)
371 330  
372 331 if __name__ == '__main__':
373 332 main()
... ...
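The call kept in main(), confidence_map_change(in_tif, out_tif, median_radius = 9), is defined outside this diff. As a rough illustration only, a median smoothing of a single-band confidence raster could look like the sketch below; GDAL and SciPy are assumptions here, not necessarily what the real function uses:

    import numpy as np
    from osgeo import gdal
    from scipy.ndimage import median_filter

    def median_smooth_confidence(in_tif, out_tif, median_radius=9):
        # Hypothetical stand-in for confidence_map_change: read band 1,
        # apply a square median filter of side 2*radius+1, write a copy.
        src = gdal.Open(in_tif)
        band = src.GetRasterBand(1).ReadAsArray().astype(np.float32)
        filtered = median_filter(band, size=2 * median_radius + 1)

        driver = gdal.GetDriverByName('GTiff')
        dst = driver.Create(out_tif, src.RasterXSize, src.RasterYSize, 1, gdal.GDT_Float32)
        dst.SetGeoTransform(src.GetGeoTransform())
        dst.SetProjection(src.GetProjection())
        dst.GetRasterBand(1).WriteArray(filtered)
        dst.FlushCache()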
ALCD/contour_from_labeled.py
... ... @@ -237,23 +237,9 @@ def generate_all_quicks():
237 237 def main():
238 238 main_dir = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20171002'
239 239 quick_contours(main_dir)
240   -
241   -
242 240 #~ generate_all_quicks()
243   -
244 241 return
245   - in_tif = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_20170516/Out/labeled_img_regular.tif'
246   - #~ out_tif = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_20170516/Out/contour_regular.tif'
247   - out_tif = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_20170516/Out/contour_regular3.tif'
248   -
249   - #~ contour_from_labeled_dilatation(in_tif, out_tif, 3)
250   - #~ stack_contours(in_tif, out_tif, 15)
251   -
252   - in_tif = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_20170516/In_data/Image/Orleans_bands.tif'
253   - label_tif = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_20170516/Out/contour_regular3.tif'
254 242  
255   - out_tif = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_20170516/In_data/Image/Orleans_RGB.png'
256   - rgb_stacking(in_tif, label_tif, out_tif)
257 243  
258 244 if __name__ == '__main__':
259 245 main()
... ...
ALCD/expand_point_region.py
... ... @@ -241,28 +241,11 @@ def create_squares(in_shp, out_shp, max_dist_X, max_dist_Y):
241 241  
242 242  
243 243  
244   -def main():
245   - in_shp = '/mnt/data/home/baetensl/OTB_codes/OTB_commands/Full_orleans/In_data/Masks/human_reference.shp'
246   - out_shp = '/mnt/data/home/baetensl/OTB_codes/OTB_commands/Full_orleans/In_data/Masks/NEW_delete.shp'
247   -
  244 +def main():
248 245 in_shp = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_all/In_data/Masks/land.shp'
249 246 out_shp = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_all/In_data/Masks/SQUARES.shp'
250 247  
251   -
252   - #~ in_shp = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_all/Intermediate/train_points.shp'
253   - #~ out_shp = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_all/Intermediate/train_points222.shp'
254   -
255   - #~ createBuffer(in_shp, out_shp, buffer_dist = 0.01)
256   - #~ translate_points(in_shp, out_shp, translation_dist = 0.01)
257   -
258   - km = 50000
259   - degX = km_to_deg_convert(km, direction='lon', latitude=48)
260   - degY = km_to_deg_convert(km, direction='lat', latitude=48)
261   -
262   - degX = 100
263   - degY = 100
264   - #~ create_neighbors(in_shp, out_shp, max_dist_X = degX, max_dist_Y = degY, nb_samples=5)
265   - create_squares(in_shp, out_shp, max_dist_X = degX, max_dist_Y = degY)
  248 + create_squares(in_shp, out_shp, max_dist_X = 100, max_dist_Y = 100)
266 249  
267 250 if __name__ == '__main__':
268 251 main()
... ...
ALCD/find_directory_names.py
... ... @@ -36,7 +36,6 @@ def get_all_dates(location):
36 36 dates.append(last_dir[start_date_index+1:start_date_index+9])
37 37  
38 38 dates = sorted(dates)
39   - #~ print(dates)
40 39 return dates
41 40  
42 41  
... ... @@ -84,23 +83,13 @@ def get_L1C_dir(location, wanted_date, display = True):
84 83 '''
85 84 Get the path of the L1C directory
86 85 If the date is not valid, returns the closest one (after)
87   -
88   - TODO ? : REMOVE THE CLOSEST DATE AND RETURNS AN ERROR IF THE DATE IS INVALID ?
89 86 '''
90 87 paths_configuration = json.load(open(op.join('..', 'paths_configuration.json')))
91 88 L1C_dir = paths_configuration["global_chains_paths"]["L1C"]
92 89 location_dir = op.join(L1C_dir, location)
93 90  
94   - # CLOSEST DATE REMOVED, SHOULD STILL WORK
95   - #~ date = get_closest_dates(location, wanted_date, mode = 'after', nb_days = 1)
96   -
97   - #~ if len(date) == 0:
98   - #~ date = get_closest_dates(location, wanted_date, mode = 'before', nb_days = 1)
99   - #~ date = date[0]
100   -
101 91 date = wanted_date
102 92  
103   -
104 93 with_date = glob.glob(op.join(location_dir, 'S2*_{}*.SAFE'.format(date)))
105 94 granule = op.join(with_date[0], 'GRANULE')
106 95  
... ... @@ -123,8 +112,6 @@ def get_L1C_dir(location, wanted_date, display = True):
123 112  
124 113  
125 114 def main():
126   -
127   -
128 115 location = 'Arles'
129 116 date = '20170917'
130 117 directory = get_L1C_dir(location, date, display = False)
... ...
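With the closest-date fallback removed, get_L1C_dir now globs on the exact requested date. The lookup it keeps boils down to something like the following sketch (paths and the error handling are illustrative):

    import glob
    import os.path as op

    def resolve_l1c_granule(location_dir, wanted_date):
        # The date is used as-is; an unknown date yields no match,
        # so fail explicitly instead of falling back to a nearby date.
        with_date = glob.glob(op.join(location_dir, 'S2*_{}*.SAFE'.format(wanted_date)))
        if not with_date:
            raise ValueError('No L1C product found for {}'.format(wanted_date))
        return op.join(with_date[0], 'GRANULE')

    # e.g. resolve_l1c_granule('/path/to/L1C/Arles', '20170917')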
ALCD/layers_creation.py
... ... @@ -123,10 +123,6 @@ def create_no_data_shp(global_parameters, force = False):
123 123 '''
124 124 main_dir = global_parameters["user_choices"]["main_dir"]
125 125  
126   - #~ if op.exists(op.join(main_dir, 'In_data', 'Masks', 'no_data.shp')) and force == False:
127   - #~ print('No_data layer already present, use -force to erase and replace')
128   - #~ return
129   -
130 126 tmp_name = next(tempfile._get_candidate_names())
131 127 tmp_tif = op.join('tmp', 'no_data_mask_{}.tif'.format(tmp_name))
132 128  
... ... @@ -145,6 +141,7 @@ def create_no_data_shp(global_parameters, force = False):
145 141 simplify_geometry(tmp_shp, out_shp, tolerance = 100)
146 142 return
147 143  
  144 +
148 145 def simplify_geometry(in_shp, out_shp, tolerance = 100):
149 146 ''' Simplification of a shapefile
150 147 This allows to have lighter polygons, enhancing the rapidty
... ... @@ -162,34 +159,10 @@ def main():
162 159 global_parameters = json.load(open(op.join('parameters_files','global_parameters.json')))
163 160  
164 161 create_all_classes_empty_layers(global_parameters, force = False)
165   -
166   - return
167   - #~ in_tif = '/mnt/data/home/baetensl/classification_clouds/Data/Full_orleans/In_data/Image/orleans_bands.tif'
  162 +
168 163 create_no_data_shp(global_parameters)
169 164  
170 165 return
171   - main_dir = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20171002'
172   - in_tif = op.join(main_dir, 'In_data', 'Image', 'Arles_bands.tif')
173   - out_shp = op.join(main_dir, 'Intermediate', 'no_data_autom.shp')
174   -
175   - no_data_preprocess(in_tif, out_shp)
176   -
177   - #~ fill_precomputed_classes()
178   -
179   - return
180   - in_tif = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/RailroadValley_11SPC_20170501/In_data/Image/RailroadValley_bands.tif'
181   -
182   - layers_to_create = []
183   - global_parameters = json.load(open(op.join('parameters_files','global_parameters.json')))
184   -
185   - create_all_classes_empty_layers(global_parameters)
186   -
187   - #~ for key, value in global_parameters["masks"].iteritems():
188   - #~ layers_to_create.append(op.join(global_parameters["general"]["main_dir"], 'In_data', 'Masks', value))
189   - #~ print(layers_to_create[0])
190   -
191   - #~ out_shp_list = ['/mnt/data/home/baetensl/classification_clouds/Data/Full_orleans/In_data/Image/land_shp.shp']
192   - #~ empty_shapefile_creation(in_tif, layers_to_create)
193 166  
194 167 if __name__ == '__main__':
195 168 main()
... ...
ALCD/masks_preprocessing.py
... ... @@ -152,11 +152,11 @@ def main():
152 152 k_step = 2
153 153 k_fold_dir = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20171002/kfold'
154 154 load_kfold(train_shp, validation_shp, k_step, k_fold_dir)
155   -
156   - return
157   -
  155 +
158 156 masks_preprocess()
159 157 #~ split_and_augment()
160 158  
  159 + return
  160 +
161 161 if __name__ == '__main__':
162 162 main()
... ...
ALCD/metrics_exploitation.py
... ... @@ -248,14 +248,6 @@ def save_model_metrics(global_parameters):
248 248 files_of_interest = []
249 249 src_dirs = []
250 250  
251   -
252   - #~ src_dirs.append(statistics_dir)
253   - #~ files_of_interest.append(global_parameters["postprocessing"]["confusion_matrix"])
254   - #~ src_dirs.append(statistics_dir)
255   - #~ files_of_interest.append(global_parameters["postprocessing"]["binary_confusion_matrix"])
256   - #~ src_dirs.append(statistics_dir)
257   - #~ files_of_interest.append(global_parameters["postprocessing"]["model_metrics"])
258   - #~ src_dirs.append(statistics_dir)
259 251  
260 252 for interesting_param in ["confusion_matrix", "binary_confusion_matrix", "model_metrics"]:
261 253 src_dirs.append(statistics_dir)
... ... @@ -294,9 +286,6 @@ def save_model_metrics(global_parameters):
294 286  
295 287  
296 288  
297   -
298   -
299   -
300 289  
301 290 def retrieve_Kfold_data(global_parameters, metrics_plotting = False, location = '', date = ''):
302 291 '''
... ... @@ -515,9 +504,7 @@ def plot_mean_statistics_all_sites(plot_both = True):
515 504 csv_file = op.join('/mnt/data/home/baetensl/clouds_detection_git/Various_data', 'all_sites_dates.csv')
516 505 locations, _, dates = get_all_locations_dates(csv_file)
517 506  
518   -
519 507 all_metrics = []
520   -
521 508 accuracies = []
522 509 f1scores = []
523 510 scenes_names = []
... ... @@ -596,68 +583,13 @@ def plot_mean_statistics_all_sites(plot_both = True):
596 583  
597 584  
598 585  
599   - #~ plt.figure()
600   - #~ indices = [0,1,2,3]
601   - #~ accuracies = [m[0] for m in all_metrics]
602   - #~ f1scores = [m[1] for m in all_metrics]
603   - #~ recalls = [m[2] for m in all_metrics]
604   - #~ precisions = [m[3] for m in all_metrics]
605   -
606   - #~ print(len(accuracies))
607   - #~ return
608   -
609   - #~ met_nb = 0
610   - #~ for metric in [accuracies, f1scores, recalls, precisions]:
611   -
612   - #~ rnd = [(indices[met_nb] - 0.1 + 0.2*(float(k)/len(accuracies))) for k in range(len(accuracies))]
613   - #~ plt.scatter(rnd, metric, color='k', marker='.', alpha = 0.2)
614   - #~ met_nb += 1
615   - #~ plt.errorbar(indices, means[0:4], stds[0:4], linestyle='',
616   - #~ marker='o', lw=2, elinewidth = 2, capsize = 8, capthick = 1, color = 'b')
617   - #~ plt.ylim(0.5,1)
618   -
619   - #~ metrics_names = ['Accuracy\n{:.1f}%'.format(means[0]*100),
620   - #~ 'F1-score\n{:.1f}%'.format(means[1]*100),
621   - #~ 'Recall\n{:.1f}%'.format(means[2]*100),
622   - #~ 'Precision\n{:.1f}%'.format(means[3]*100)]
623   - #~ plt.xticks(indices, metrics_names)
624   -
625   - #~ nb_dates = float(len(accuracies))/11
626   - #~ plt.title('Metrics of a 10-fold cross-validation \n on {:.0f} scenes'.format(len(locations)))
627   - #~ plt.xlabel('Score type')
628   - #~ plt.ylabel('Scores')
629   -
630   -
631   - #~ # Custom legend
632   - #~ custom_lines = []
633   - #~ custom_lines.append(Line2D([0], [0], color='w', markerfacecolor = 'k', marker='.', alpha = 0.2))
634   - #~ custom_lines.append(Line2D([0], [0], color='w', markerfacecolor = 'b', marker='o', alpha = 1))
635   - #~ legend_labels = ['Single validation point', 'Mean and std of all sites']
636   - #~ plt.legend(custom_lines, legend_labels, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)
637   -
638   - #~ plt.show(block=False)
639   -
640   - #~ out_fig = op.join('tmp_report', 'kfold_synthese_mean.png')
641   - #~ print('Figure saved in {}'.format(out_fig))
642   - #~ plt.savefig(out_fig, bbox_inches='tight')
643   - #~ plt.close()
644   -
645 586  
646 587  
647 588  
648 589 def main():
649   - #~ plot_mean_statistics_all_sites()
650   - #~ return
651   - #~ plot_statistics_all_sites()
652   - #~ return
653   -
654 590 global_parameters = json.load(open(op.join('parameters_files','global_parameters.json')))
655 591 retrieve_Kfold_data(global_parameters, metrics_plotting = True)
656   -
657   - #~ location = 'Arles'
658   - #~ date = 20171221
659   - #~ load_previous_global_parameters(location, date)
660   -
  592 +
661 593  
662 594  
663 595 if __name__ == '__main__':
... ...
ALCD/split_samples.py
... ... @@ -295,11 +295,6 @@ def k_split(in_shp, out_dir, K):
295 295  
296 296  
297 297 def main():
298   - #~ in_shp = '/mnt/data/home/baetensl/OTB_codes/OTB_commands/Full_orleans/In_data/Masks/points_clouds.shp'
299   - #~ train_shp = '/mnt/data/home/baetensl/OTB_codes/OTB_commands/Full_orleans/In_data/Masks/points_clouds_train.shp'
300   - #~ validation_shp = '/mnt/data/home/baetensl/OTB_codes/OTB_commands/Full_orleans/In_data/Masks/points_clouds_validation.shp'
301   -
302   - shp_dir = '/mnt/data/home/baetensl/classification_clouds/Data/Orleans_all/Intermediate'
303 298 shp_dir = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20171002/Intermediate'
304 299 in_shp = op.join(shp_dir, 'merged.shp')
305 300  
... ... @@ -312,10 +307,8 @@ def main():
312 307 train_shp = op.join(shp_dir, 'train_points.shp')
313 308 validation_shp = op.join(shp_dir, 'validation_points.shp')
314 309  
315   -
316 310 proportion = 0.7
317 311 split_points_sample(in_shp, train_shp, validation_shp, proportion)
318   - #~ splitting(in_shp, train_shp, validation_shp, proportion)
319 312  
320 313  
321 314  
... ...
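main() above ends with split_points_sample(in_shp, train_shp, validation_shp, proportion) at proportion = 0.7. That function is outside this diff; a plain random 70/30 split of point features with OGR might look like the sketch below (the real function may well stratify per class):

    import random
    from osgeo import ogr

    def split_points_random(in_shp, train_shp, validation_shp, proportion=0.7, seed=0):
        # Hypothetical stand-in for split_points_sample: copy each point
        # into the train or validation shapefile with probability `proportion`.
        random.seed(seed)
        driver = ogr.GetDriverByName('ESRI Shapefile')
        src = driver.Open(in_shp, 0)
        src_layer = src.GetLayer()
        defn = src_layer.GetLayerDefn()

        sinks = {}
        for path in (train_shp, validation_shp):
            ds = driver.CreateDataSource(path)
            layer = ds.CreateLayer(src_layer.GetName(), src_layer.GetSpatialRef(), defn.GetGeomType())
            for i in range(defn.GetFieldCount()):
                layer.CreateField(defn.GetFieldDefn(i))
            sinks[path] = (ds, layer)

        for feature in src_layer:
            target = train_shp if random.random() < proportion else validation_shp
            sinks[target][1].CreateFeature(feature.Clone())

        for ds, _ in sinks.values():
            ds.Destroy()  # flush shapefiles to disk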
ALCD/synthese_alcd_runs.py
... ... @@ -193,14 +193,7 @@ def main():
193 193  
194 194 plot_samples_evolution_statistics(main_dirs)
195 195  
196   - #~ in_shp = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20171221/Previous_iterations/SAVE_3/In_data/Masks/land.shp'
197   - #~ count_points_in_shp(in_shp)
198   - #~ directory = '/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Arles_31TFJ_20171221/Previous_iterations/SAVE_3/In_data/Masks/'
199   - #~ count_points_in_dir(directory)
200   - #~ plot_all_metrics(paths_configuration, comparison_parameters, locations, alcd_algo = 'alcd_initial', grouping = group, excluded = excluded, plot_type = plot_type)
201   -
202   -
203   -
  196 +
204 197 if __name__ == '__main__':
205 198 main()
206 199  
... ...