Commit 7251fef21931936385690c04aff580cdcbc5201f

Authored by Louis Baetens
1 parent c1d537d4
Exists in master

Change to make PCC easily runnable from the command line

PCC/all_run_pcc.py
... ... @@ -60,7 +60,8 @@ def create_directories(comparison_parameters):
60 60 print('Done')
61 61  
62 62  
63   -def initialization_comparison_parameters(main_dir, location, current_date, sub_data_dir):
  63 +def initialization_comparison_parameters(main_dir, location, current_date,
  64 + sub_data_dir, maja_dir_name, maja_version):
64 65 ''' To initialize the path and name in the JSON file
65 66 Must be done at the very beginning
66 67 '''
... ... @@ -84,6 +85,11 @@ def initialization_comparison_parameters(main_dir, location, current_date, sub_d
84 85 #~ data["alcd_output"]["main_dir"] = op.join(paths_configuration["data_paths"]["data_alcd"], (location + '_' + tile + '_' + current_date))
85 86 data["automatically_generated"]["alcd_main_dir"] = op.join(paths_configuration["data_paths"]["data_alcd"], (location + '_' + tile + '_' + current_date))
86 87  
  88 + data["maja_parameters"]["maja_dir_name"] = maja_dir_name
  89 + data["maja_parameters"]["maja_version"] = maja_version
  90 +
  91 +
  92 +
87 93 # Save our changes to JSON file
88 94 jsonFile = open(json_path, "w+")
89 95 jsonFile.write(json.dumps(data, indent=3, sort_keys=True))
... ... @@ -93,7 +99,8 @@ def initialization_comparison_parameters(main_dir, location, current_date, sub_d
93 99  
94 100  
95 101 def run_all(part, location, current_date, sub_data_dir, masks_already_computed = False,
96   - comparison_parameters = None, alcd_ref = 'idc', binary_only = False):
  102 + comparison_parameters = None, alcd_ref = 'idc', binary_only = False,
  103 + maja_dir_name = None, maja_version = None):
97 104 paths_configuration = json.load(open(op.join('..', 'paths_configuration.json')))
98 105 Data_PCC_dir = paths_configuration["data_paths"]["data_pcc"]
99 106 Data_ALCD_dir = paths_configuration["data_paths"]["data_alcd"]
... ... @@ -106,7 +113,8 @@ def run_all(part, location, current_date, sub_data_dir, masks_already_computed =
106 113 main_dir = op.join(Data_PCC_dir, (location + '_' + tile + '_' + current_date))
107 114 else:
108 115 main_dir = op.join(Data_PCC_dir, sub_data_dir, (location + '_' + tile + '_' + current_date))
109   - initialization_comparison_parameters(main_dir, location, current_date, sub_data_dir)
  116 + initialization_comparison_parameters(main_dir, location, current_date, sub_data_dir,
  117 + maja_dir_name = maja_dir_name, maja_version = maja_version)
110 118  
111 119 # Load the parameters
112 120 comparison_parameters = json.load(open(op.join('parameters_files','comparison_parameters.json')))
... ... @@ -140,9 +148,14 @@ def run_all(part, location, current_date, sub_data_dir, masks_already_computed =
140 148  
141 149 try:
142 150 # maja
143   - maja_cloud_mask = find_chain_directory_paths.get_mask_path(location, date_string, processing_chain='maja', mask_type='cloud', display = False)
  151 + maja_dir_name = comparison_parameters["maja_parameters"]["maja_dir_name"]
  152 + maja_version = int(comparison_parameters["maja_parameters"]["maja_version"])
  153 +
  154 + maja_cloud_mask = find_chain_directory_paths.get_mask_path(location, date_string, processing_chain='maja', mask_type='cloud',
  155 + maja_version = maja_version, maja_dir_name = maja_dir_name, display = False)
144 156 shutil.copy(maja_cloud_mask, op.join(main_dir, 'Original_data', comparison_parameters["processing"]["maja"]["cloud_mask"]))
145   - maja_geo_mask = find_chain_directory_paths.get_mask_path(location, date_string, processing_chain='maja', mask_type='geo', display = False)
  157 + maja_geo_mask = find_chain_directory_paths.get_mask_path(location, date_string, processing_chain='maja', mask_type='geo',
  158 + maja_version = maja_version, maja_dir_name = maja_dir_name, display = False)
146 159 shutil.copy(maja_geo_mask, op.join(main_dir, 'Original_data', comparison_parameters["processing"]["maja"]["geo_mask"]))
147 160 except:
148 161 logging.exception('')
... ... @@ -233,6 +246,8 @@ def main():
233 246 parser.add_argument('-m', action='store', default=None, dest='masks_already_computed', help='Bool, if the masks have already been computed')
234 247 parser.add_argument('-r', action='store', default='id', dest='alcd_ref', help='Which chains you want to compare to. Can contain i (initial), d (dilation). e.g. id for initial and dilation')
235 248 parser.add_argument('-b', action='store', default=None, dest='binary_only', help='If you only want to compute the binary difference. False will compute the multi-class difference also')
  249 + parser.add_argument('-mdir', action='store', default=None, dest='maja_dir_name', help='The name of the MAJA sub-directory you want to use')
  250 + parser.add_argument('-mver', action='store', default=3, dest='maja_version', help='The version of MAJA you use, needed for the bits interpretation')
236 251  
237 252 results = parser.parse_args()
238 253  
... ... @@ -243,6 +258,8 @@ def main():
243 258 sub_data_dir = results.sub_data_dir
244 259  
245 260  
  261 +
  262 +
246 263  
247 264 if results.binary_only == None:
248 265 binary_only = False
... ... @@ -256,8 +273,31 @@ def main():
256 273  
257 274  
258 275  
  276 + try:
  277 + maja_version = int(results.maja_version)
  278 + except ValueError:
  279 + print("Sorry, MAJA version should be an int")
  280 + raise Exception
  281 +
  282 + if maja_version <1 or maja_version >3:
  283 + print("MAJA version should be between 1 and 3")
  284 + raise Exception
  285 +
  286 +
  287 + maja_dir_name = results.maja_dir_name
  288 + if results.maja_dir_name == None:
  289 + maja_dir_name = 'MAJA_3_1_S2AS2B_MUSCATE_TM'
  290 + else:
  291 + maja_dir_name = results.maja_dir_name
  292 + try:
  293 + find_chain_directory_paths.get_processing_dir(location, current_date,
  294 + processing_chain='maja', maja_version = maja_version, maja_dir_name = maja_dir_name, display = True)
  295 + except:
  296 + print('Invalid MAJA dir name')
  297 + raise Exception
  298 +
259 299  
260   - comparison_parameters = run_all(1, location, current_date, sub_data_dir, masks_already_computed, alcd_ref)
  300 + comparison_parameters = run_all(1, location, current_date, sub_data_dir, masks_already_computed, alcd_ref, maja_dir_name = maja_dir_name, maja_version = maja_version)
261 301 # Saving the comparison parameters in a variable allows to run
262 302 # multiple PCC in parallel on different locations and dates at the same time
263 303 run_all(2, location, current_date, sub_data_dir, masks_already_computed, comparison_parameters, alcd_ref)
... ...
PCC/find_chain_directory_paths.py
... ... @@ -49,7 +49,7 @@ def check_existing_date(location, date_to_verify, processing_chain='maja', displ
49 49  
50 50  
51 51  
52   -def get_processing_dir(location, date_string, processing_chain='maja', display = True):
  52 +def get_processing_dir(location, date_string, processing_chain='maja', maja_version = 3, maja_dir_name = '', display = True):
53 53 '''
54 54 Return the directory where the outputs of the processing chain are,
55 55 for a location and date
... ... @@ -64,22 +64,15 @@ def get_processing_dir(location, date_string, processing_chain='maja', display =
64 64  
65 65 chain_dir = paths_configuration["global_chains_paths"]["maja"]
66 66  
67   - try:
68   - maja_parameters = json.load(open(op.join('parameters_files', 'maja_parameters.json')))
69   - version = int(maja_parameters["maja"]["version_for_dirs"])
70   - except:
71   - version = 2
72   -
73 67 # TO REMOVE, THIS IS A SPECIAL CASE FOR THE CESBIO SERVER
74 68 if location == 'Ispra':
75 69 location = 'Italie'
76 70  
77   - if version == 2:
  71 + if maja_version == 2 or maja_version == 1:
78 72 location_dir = glob.glob(op.join(chain_dir, location, '*{}*'.format(tile), 'MAJA_1_0_S2AS2B_NATIF*'))[0]
79 73 date_dir = glob.glob(op.join(location_dir, '*OPER_SSC*{}*.DBL.DIR'.format(date_string)))[0]
80   - elif version == 3:
81   - subdir_name = maja_parameters["maja"]["subdir_name"]
82   - location_dir = glob.glob(op.join(chain_dir, location, '*{}*'.format(tile), subdir_name))[0]
  74 + elif maja_version == 3:
  75 + location_dir = glob.glob(op.join(chain_dir, location, '*{}*'.format(tile), maja_dir_name))[0]
83 76 date_dir = glob.glob(op.join(location_dir, 'SENTINEL2*{}*_C_V1-0'.format(date_string)))[0]
84 77  
85 78  
... ... @@ -104,30 +97,27 @@ def get_processing_dir(location, date_string, processing_chain='maja', display =
104 97 return date_dir
105 98  
106 99  
107   -def get_mask_path(location, date_string, processing_chain='maja', mask_type='cloud', display = True):
  100 +def get_mask_path(location, date_string, processing_chain='maja', mask_type='cloud',
  101 + maja_version = 3, maja_dir_name = '', display = True):
108 102 '''
109 103 Return the full path of the masks returned by the chain
110 104 mask_type is for MAJA, which can be the cloud mask or the geo mask
111 105 '''
112 106  
113   - if processing_chain == 'maja':
114   - try:
115   - maja_parameters = json.load(open(op.join('parameters_files', 'maja_parameters.json')))
116   - version = int(maja_parameters["maja"]["version_for_dirs"])
117   - except:
118   - version = 2
119   -
  107 + if processing_chain == 'maja':
120 108 if mask_type=='cloud':
121   - processing_dir = get_processing_dir(location, date_string, processing_chain='maja', display = True)
122   - if version == 2:
  109 + processing_dir = get_processing_dir(location, date_string, processing_chain='maja',
  110 + maja_version = maja_version, maja_dir_name = maja_dir_name, display = False)
  111 + if maja_version == 2 or maja_version == 1:
123 112 mask_path = glob.glob(op.join(processing_dir, '*_CLD_R2.DBL.TIF'))[0]
124   - elif version == 3:
  113 + elif maja_version == 3:
125 114 mask_path = glob.glob(op.join(processing_dir, 'MASKS', '*_CLM_R2.tif'))[0]
126 115 elif mask_type=='geo':
127   - processing_dir = get_processing_dir(location, date_string, processing_chain='maja', display = False)
128   - if version == 2:
  116 + processing_dir = get_processing_dir(location, date_string, processing_chain='maja',
  117 + maja_version = maja_version, maja_dir_name = maja_dir_name, display = False)
  118 + if maja_version == 2 or maja_version == 1:
129 119 mask_path = glob.glob(op.join(processing_dir, '*_MSK_R2.DBL.TIF'))[0]
130   - elif version == 3:
  120 + elif maja_version == 3:
131 121 mask_path = glob.glob(op.join(processing_dir, 'MASKS', '*_MG2_R2.tif'))[0]
132 122  
133 123 elif processing_chain == 'sen2cor':
... ...
PCC/masks_conversion.py
... ... @@ -96,16 +96,11 @@ def maja_class_selection_expression(class_nb, valid_bits, invalid_bits, binary_s
96 96  
97 97  
98 98  
99   -def convert_mask_maja_clouds(in_tif, out_tif, binary_only = False):
  99 +def convert_mask_maja_clouds(in_tif, out_tif, maja_version, binary_only = False):
100 100 '''
101 101 Takes the cloud mask of MAJA (*_CLD_R2.DBL.TIF) and convert it
102 102 to an equivalent to the labeled one of ALCD
103 103 '''
104   - try:
105   - maja_parameters = json.load(open(op.join('parameters_files', 'maja_parameters.json')))
106   - maja_version = int(maja_parameters["maja"]["version_for_bits"])
107   - except:
108   - maja_version = 2
109 104  
110 105 print(' Converting the cloud mask from MAJA')
111 106 BandMathX = otbApplication.Registry.CreateApplication("BandMathX")
... ... @@ -133,7 +128,7 @@ def convert_mask_maja_clouds(in_tif, out_tif, binary_only = False):
133 128  
134 129 # low clouds
135 130 class_nb = '2'
136   - if maja_version == 2:
  131 + if maja_version == 2 or maja_version == 1:
137 132 valid_bits = [5,6,7] #clouds detected with threshold, change and extension
138 133 invalid_bits = [8] #not cirrus
139 134 elif maja_version == 3:
... ... @@ -144,7 +139,7 @@ def convert_mask_maja_clouds(in_tif, out_tif, binary_only = False):
144 139  
145 140 # shadows
146 141 class_nb = '4'
147   - if maja_version == 2:
  142 + if maja_version == 2 or maja_version == 1:
148 143 valid_bits = [3,4] #shadows and shadows outside the image
149 144 invalid_bits = [5,6,7,8] #not the clouds. A shadow pixel is often part
150 145 #of the clouds class, but we prefer to assign it the cloud class
... ... @@ -158,7 +153,7 @@ def convert_mask_maja_clouds(in_tif, out_tif, binary_only = False):
158 153  
159 154 # cirrus
160 155 class_nb = '3'
161   - if maja_version == 2:
  156 + if maja_version == 2 or maja_version == 1:
162 157 valid_bits = [8] #cirrus
163 158 invalid_bits = [] #cirrus are also part of clouds class, but we assign
164 159 #them the cirrus class (so no need to add 5,6,7 here)
... ... @@ -190,16 +185,16 @@ def convert_mask_maja_clouds(in_tif, out_tif, binary_only = False):
190 185  
191 186  
192 187  
193   -def convert_mask_maja_geophysical(in_tif, out_tif):
  188 +def convert_mask_maja_geophysical(in_tif, out_tif, maja_version):
194 189 '''
195 190 Takes the geophysical mask of MAJA (*_MSK_R2.DBL.TIF) and convert it
196 191 to an equivalent to the labeled one of ALCD
197 192 '''
198   - try:
199   - maja_parameters = json.load(open(op.join('parameters_files', 'maja_parameters.json')))
200   - maja_version = int(maja_parameters["maja"]["version_for_bits"])
201   - except:
202   - maja_version = 2
  193 + #~ try:
  194 + #~ maja_parameters = json.load(open(op.join('parameters_files', 'maja_parameters.json')))
  195 + #~ maja_version = int(maja_parameters["maja"]["version_for_bits"])
  196 + #~ except:
  197 + #~ maja_version = 2
203 198  
204 199 print(' Converting the geophysical mask from MAJA')
205 200 BandMathX = otbApplication.Registry.CreateApplication("BandMathX")
... ... @@ -219,7 +214,7 @@ def convert_mask_maja_geophysical(in_tif, out_tif):
219 214  
220 215 # water
221 216 class_nb = '6'
222   - if maja_version == 2:
  217 + if maja_version == 2 or maja_version == 1:
223 218 valid_bits = [1] # bit for the water is 1st
224 219 elif maja_version == 3:
225 220 valid_bits = [1]
... ... @@ -230,7 +225,7 @@ def convert_mask_maja_geophysical(in_tif, out_tif):
230 225  
231 226 # snow
232 227 class_nb = '7'
233   - if maja_version == 2:
  228 + if maja_version == 2 or maja_version == 1:
234 229 valid_bits = [6] # bit for snow is 6th
235 230 elif maja_version == 3:
236 231 valid_bits = [3]
... ... @@ -249,7 +244,7 @@ def convert_mask_maja_geophysical(in_tif, out_tif):
249 244 BandMathX.ExecuteAndWriteOutput()
250 245 print('Done')
251 246  
252   -def convert_mask_maja(maja_cloud_mask, maja_geo_mask, out_tif, binary_only = False):
  247 +def convert_mask_maja(maja_cloud_mask, maja_geo_mask, out_tif, maja_version, binary_only = False):
253 248 '''
254 249 Takes the original cloud (*_CLD_R2.DBL.TIF) and
255 250 geopysical (*_MSK_R2.DBL.TIF) masks of MAJA, converts it to equivalent
... ... @@ -260,7 +255,7 @@ def convert_mask_maja(maja_cloud_mask, maja_geo_mask, out_tif, binary_only = Fal
260 255  
261 256 print(' Converting the totality of the masks from MAJA')
262 257 out_cloud = 'tmp/mask_maja_clouds_{}.tif'.format(current_time)
263   - convert_mask_maja_clouds(maja_cloud_mask, out_cloud, binary_only = binary_only)
  258 + convert_mask_maja_clouds(maja_cloud_mask, out_cloud, binary_only = binary_only, maja_version = maja_version)
264 259 if binary_only:
265 260 # If it is just the two-class that we want, we can use only the
266 261 # cloud mask without the geo one
... ... @@ -269,7 +264,7 @@ def convert_mask_maja(maja_cloud_mask, maja_geo_mask, out_tif, binary_only = Fal
269 264  
270 265 else:
271 266 out_geo = 'tmp/mask_maja_geo_{}.tif'.format(current_time)
272   - convert_mask_maja_geophysical(maja_geo_mask, out_geo)
  267 + convert_mask_maja_geophysical(maja_geo_mask, out_geo, maja_version = maja_version)
273 268  
274 269 BandMathX = otbApplication.Registry.CreateApplication("BandMathX")
275 270 BandMathX.SetParameterStringList("il", [str(out_cloud), str(out_geo)])
... ... @@ -560,10 +555,12 @@ def convert_all_masks(comparison_parameters, masks_already_computed = False, bin
560 555 if not masks_already_computed:
561 556 # --- MAJA
562 557 try:
  558 +
  559 + maja_version = int(comparison_parameters["maja_parameters"]["maja_version"])
563 560 #binary
564 561 out_name = 'binary_' + comparison_parameters["processing"]["maja"]["prefix"] + suffix + '.tif'
565 562 out_tif_maja = op.join(binary_classif_dir, out_name)
566   - convert_mask_maja(maja_cloud_mask_original, maja_geo_mask_original, out_tif_maja, binary_only = True)
  563 + convert_mask_maja(maja_cloud_mask_original, maja_geo_mask_original, out_tif_maja, binary_only = True, maja_version = maja_version)
567 564  
568 565 #eroded
569 566 maja_initial_binary_tif = out_tif_maja
... ... @@ -576,7 +573,7 @@ def convert_all_masks(comparison_parameters, masks_already_computed = False, bin
576 573 if not binary_only:
577 574 out_name = 'multi_' + comparison_parameters["processing"]["maja"]["prefix"] + suffix + '.tif'
578 575 out_tif_maja = op.join(multi_classif_dir, out_name)
579   - convert_mask_maja(maja_cloud_mask_original, maja_geo_mask_original, out_tif_maja, binary_only = False)
  576 + convert_mask_maja(maja_cloud_mask_original, maja_geo_mask_original, out_tif_maja, binary_only = False, maja_version = maja_version)
580 577 except:
581 578 logging.exception('')
582 579  
... ...
PCC/parameters_files/comparison_parameters.json
... ... @@ -7,13 +7,17 @@
7 7 "resolution": "60"
8 8 },
9 9 "automatically_generated": {
10   - "alcd_main_dir": "/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Alta_Floresta_Brazil_21LWK_20180813",
11   - "current_date": "20180813",
12   - "location": "Alta_Floresta_Brazil",
13   - "main_dir": "/mnt/data/home/baetensl/clouds_detection_git/Data_PCC_testtime/BLOU/Alta_Floresta_Brazil_21LWK_20180813",
14   - "raw_img": "Alta_Floresta_Brazil_bands.tif",
15   - "sub_data_dir": "BLOU",
16   - "tile": "21LWK"
  10 + "alcd_main_dir": "/mnt/data/home/baetensl/clouds_detection_git/Data_ALCD/Orleans_31UDP_20180218",
  11 + "current_date": "20180218",
  12 + "location": "Orleans",
  13 + "main_dir": "/mnt/data/home/baetensl/clouds_detection_git/Data_PCC_testtime/HOT0016/Orleans_31UDP_20180218",
  14 + "raw_img": "Orleans_bands.tif",
  15 + "sub_data_dir": "HOT0016",
  16 + "tile": "31UDP"
  17 + },
  18 + "maja_parameters": {
  19 + "maja_dir_name": "MAJA_3_1_S2AS2B_HOT016",
  20 + "maja_version": 3
17 21 },
18 22 "processing": {
19 23 "alcd_cirrus": {
... ...
PCC/parameters_files/maja_parameters.json
... ... @@ -1,7 +0,0 @@
1   -{
2   - "maja": {
3   - "subdir_name": "MAJA_3_1_S2AS2B_HOT017",
4   - "version_for_dirs": "3",
5   - "version_for_bits": "3"
6   - }
7   -}
PCC/parameters_files/maja_parameters_old.json 0 → 100644
... ... @@ -0,0 +1,7 @@
  1 +{
  2 + "maja": {
  3 + "subdir_name": "MAJA_3_1_S2AS2B_HOT017",
  4 + "version_for_dirs": "3",
  5 + "version_for_bits": "3"
  6 + }
  7 +}
... ...
PCC/statistics_synthesis.py
... ... @@ -9,13 +9,17 @@ import json
9 9 import csv
10 10 from matplotlib.lines import Line2D
11 11 import datetime
  12 +import argparse
12 13  
13 14  
14   -def get_main_directories(paths_configuration, locations):
  15 +def get_main_directories(paths_configuration, pcc_sub_dir, locations):
15 16 '''
16 17 Return the main directory of all the prescribed locations
17 18 '''
18 19 Data_PCC_dir = paths_configuration["data_paths"]["data_pcc"]
  20 + if pcc_sub_dir != None:
  21 + print(pcc_sub_dir)
  22 + Data_PCC_dir = op.join(Data_PCC_dir, pcc_sub_dir)
19 23 main_dirs = []
20 24 for location in locations:
21 25 main_dirs.extend(glob.glob(op.join(Data_PCC_dir, '*' + location + '*')))
... ... @@ -23,11 +27,11 @@ def get_main_directories(paths_configuration, locations):
23 27  
24 28  
25 29  
26   -def get_all_stats_files(paths_configuration, locations, alcd_algo_dir_name):
  30 +def get_all_stats_files(paths_configuration, pcc_sub_dir, locations, alcd_algo_dir_name):
27 31 '''
28 32 Return the binary difference stats files from the locations
29 33 '''
30   - main_dirs = get_main_directories(paths_configuration, locations)
  34 + main_dirs = get_main_directories(paths_configuration, pcc_sub_dir, locations)
31 35 potential_files = []
32 36 for main_dir in main_dirs:
33 37 potential_files.extend(glob.glob(op.join(main_dir, 'Statistics', alcd_algo_dir_name, '*binary*.json')))
... ... @@ -63,19 +67,29 @@ def get_data_and_means(stats_files):
63 67  
64 68  
65 69  
66   -def make_table_metrics(paths_configuration, comparison_parameters, locations, alcd_algo = 'alcd_initial',
67   - excluded = [], maja_erode = False, cas_alcd = None,
  70 +def make_table_metrics(paths_configuration, comparison_parameters, scenes_csv,
  71 + pcc_sub_dir = None, excluded = [], cas_alcd = None,
68 72 print_all_stats = True, file_suffix = ''):
69 73 '''
70 74 Make tables with the results of the chains on each scene
71 75 '''
72   -
73   - csv_path = '/mnt/data/home/baetensl/clouds_detection_git/Various_data/all_scenes.csv'
  76 +
  77 + if cas_alcd == 1:
  78 + alcd_algo = 'alcd_initial'
  79 + maja_erode = False
  80 + elif cas_alcd == 2:
  81 + alcd_algo = 'alcd_dilat'
  82 + maja_erode = False
  83 + elif cas_alcd == 3:
  84 + alcd_algo = 'alcd_initial'
  85 + maja_erode = True
  86 +
74 87  
75 88 all_scenes_names = []
76 89  
77 90 # append all the potential informations
78   - with open(csv_path, mode='r') as infile:
  91 + locations = []
  92 + with open(scenes_csv, mode='r') as infile:
79 93 reader = csv.reader(infile)
80 94 reader.next()
81 95 dict_scenes = {}
... ... @@ -84,18 +98,19 @@ def make_table_metrics(paths_configuration, comparison_parameters, locations, al
84 98 all_scenes_names.append(row[0])
85 99 dict_scenes[row[0]] = {}
86 100 dict_scenes[row[0]]['location'] = row[1]
  101 + locations.extend(row[1])
87 102 dict_scenes[row[0]]['clear_date'] = row[2]
88 103 dict_scenes[row[0]]['cloudy_date'] = row[3]
89 104 dict_scenes[row[0]]['tile'] = row[4]
90 105 dict_scenes[row[0]]['scene_id'] = scene_id
91 106 scene_id += 1
92 107  
93   -
  108 + locations = list(set(locations))
94 109 # current ALCD reference sub dir (e.g. initial or dilated)
95 110 alcd_algo_dir_name = comparison_parameters["processing"][alcd_algo]["sub_dir"]
96 111  
97 112 # get all the statistics files from all the directories
98   - all_stats_files_temp = get_all_stats_files(paths_configuration, locations, alcd_algo_dir_name)
  113 + all_stats_files_temp = get_all_stats_files(paths_configuration, pcc_sub_dir, locations, alcd_algo_dir_name)
99 114 all_stats_files = list(all_stats_files_temp)
100 115 if len(excluded) != 0:
101 116 for a in all_stats_files_temp:
... ... @@ -133,6 +148,10 @@ def make_table_metrics(paths_configuration, comparison_parameters, locations, al
133 148 full_sen2cor_txt = []
134 149 full_fmask_txt = []
135 150  
  151 + full_maja_data = []
  152 + full_sen2cor_data = []
  153 + full_fmask_data = []
  154 +
136 155 for scene_name in all_scenes_names:
137 156 # go through all the potential scenes
138 157 scene_maja_stats = [m for m in maja_stats if scene_name in m]
... ... @@ -148,16 +167,19 @@ def make_table_metrics(paths_configuration, comparison_parameters, locations, al
148 167 # try, to avoid having an empty list
149 168 try:
150 169 data_maja = get_metrics_from_stats(scene_maja_stats[0])
  170 + full_maja_data.append(data_maja)
151 171 data_maja_txt = ['{:.03f}'.format(m) for m in data_maja]
152 172 except:
153 173 data_maja_txt = ['/']*4
154 174 try:
155 175 data_sen2cor = get_metrics_from_stats(scene_sen2cor_stats[0])
  176 + full_sen2cor_data.append(data_sen2cor)
156 177 data_sen2cor_txt = ['{:.03f}'.format(m) for m in data_sen2cor]
157 178 except:
158 179 data_sen2cor_txt = ['/']*4
159 180 try:
160 181 data_fmask = get_metrics_from_stats(scene_fmask_stats[0])
  182 + full_fmask_data.append(data_fmask)
161 183 data_fmask_txt = ['{:.03f}'.format(m) for m in data_fmask]
162 184 except:
163 185 data_fmask_txt = ['/']*4
... ... @@ -206,15 +228,129 @@ def make_table_metrics(paths_configuration, comparison_parameters, locations, al
206 228 print(row)
207 229 writer.writerow(row)
208 230  
  231 + return full_maja_data, full_sen2cor_data, full_fmask_data
209 232  
210 233  
211 234  
  235 +
  236 +
  237 +
  238 +
  239 +
  240 +
def plot_all_metrics_simplified(data_maja, data_sen2cor, data_fmask, out_fig_name):
    '''
    Plot 4 metrics (accuracy, F1-score, recall, precision) for the three
    processing chains (MAJA, Sen2Cor, Fmask) as grouped box-plots.

    Each data_* argument is a list of per-scene rows
    [accuracy, f1, recall, precision]. The figure is saved to
    tmp_maja_improvement/<out_fig_name>.

    Returns the flat list of per-chain metric means
    (MAJA means first, then Sen2Cor, then Fmask).
    '''

    # Transpose the stats: one list per metric instead of one row per scene.
    # list(...) keeps this working on Python 3, where map() is a lazy iterator
    # (indexing data_maja[0] below would otherwise raise TypeError).
    data_maja = list(map(list, zip(*data_maja)))
    data_sen2cor = list(map(list, zip(*data_sen2cor)))
    data_fmask = list(map(list, zip(*data_fmask)))

    print('Nb of sites: {}'.format(len(data_maja[0])))

    nb_metrics = 4
    nb_chains = 3
    positions = []

    # One group of nb_chains boxes per metric; each box is `width` wide.
    width = 1./(nb_metrics)
    for chain in range(nb_chains):
        positions.append((np.arange(chain*width, 4+chain*width, 1) + width/2))

    # Positions of the future bars
    positions_maja = positions[0]
    positions_sen2cor = positions[1]
    positions_fmask = positions[2]

    fig, ax = plt.subplots()
    ax.yaxis.grid(True) # put horizontal grid

    colors=[[0.0, 0.69, 0.20], [0.0, 0.77, 0.80], [0.80, 0.62, 0.0]]
    all_legend_names = ['MAJA', 'Sen2Cor', 'Fmask']
    ticks_names = ['Accuracy', 'F1-score', 'Recall', 'Precision']
    ticks_positions = positions[0]
    plt.xlabel('Score type')

    # Per-chain means (plotted as red dots) and scene counts (for the title)
    all_means = []
    nb_of_scenes_per_chain = []
    for data in [data_maja, data_sen2cor, data_fmask]:
        nb_of_scenes_per_chain.append(len(data[0]))
        all_means.extend(np.mean(data, axis = 1))

    all_positions = []
    for positions_list in [positions_maja, positions_sen2cor, positions_fmask]:
        all_positions.extend(positions_list)

    outliers = '' # put 'ow' to have outliers as white points
    boxes_maja = plt.boxplot(data_maja, sym=outliers, positions=positions_maja, widths = width, patch_artist=True)
    boxes_sen2cor = plt.boxplot(data_sen2cor, sym=outliers, positions=positions_sen2cor, widths = width, patch_artist=True)
    boxes_fmask = plt.boxplot(data_fmask, sym=outliers, positions=positions_fmask, widths = width, patch_artist=True)

    # Give every box of a chain that chain's colour
    colors_maps = []
    for k in range(len(colors)):
        for l in range(nb_metrics):
            colors_maps.append(colors[k])
    k = 0
    for bplot in (boxes_maja, boxes_sen2cor, boxes_fmask):
        for patch in bplot['boxes']:
            colo_spec = colors_maps[k]
            patch.set_facecolor(colo_spec)
            k += 1

    plt.plot(all_positions, all_means, 'ro')

    plt.ylim(0,1)
    plt.xlim(0,4.0)
    plt.ylabel('Scores')

    plt.xticks(([t + width for t in ticks_positions]), ticks_names)

    # legend customization
    custom_lines = [Line2D([0], [0], color=colors[j], lw=4) for j in range(len(all_legend_names))]
    custom_lines.append(Line2D([0], [0], color='w', markerfacecolor = 'r', marker='o'))
    all_legend_names.append('Means')
    plt.legend(custom_lines, all_legend_names, bbox_to_anchor=(1.05, 1), loc=2, borderaxespad=0.)

    # All chains processed the same number of scenes -> single count in title.
    # (Fixed: the original tested `len(set(...)) == 0`, which is never true for
    # a non-empty list, and formatted the whole list instead of one count.)
    if len(set(nb_of_scenes_per_chain)) == 1:
        plt.title('Statistics over {} images'.format(nb_of_scenes_per_chain[0]))
    else:
        plt.title('Statistics over {}/{}/{} images'.format(nb_of_scenes_per_chain[0],
                        nb_of_scenes_per_chain[1], nb_of_scenes_per_chain[2]))

    out_fig = op.join('tmp_maja_improvement', out_fig_name)
    plt.savefig(out_fig, bbox_inches='tight')
    plt.close()

    return all_means
  337 +
  338 +
  339 +
  340 +
  341 +
  342 +
  343 +
  344 +
  345 +
  346 +
  347 +
  348 +
  349 +
  350 +
  351 +
  352 +
  353 +def plot_all_metrics_deprecated(paths_configuration, comparison_parameters, locations, alcd_algo = 'alcd_initial',
218 354 grouping = 'by_chain', plot_type = 'errorbar', excluded = [], maja_erode = False, cas_alcd = None):
219 355 '''
220 356 Plot 4 metrics for all the processing chains
... ... @@ -416,9 +552,42 @@ def plot_all_metrics(paths_configuration, comparison_parameters, locations, alcd
416 552  
417 553  
418 554 def main():
  555 + out_dir = 'tmp_maja_improvement'
  556 + if not op.exists(out_dir):
  557 + os.makedirs(out_dir)
  558 + print(out_dir + ' created')
  559 +
  560 + parser = argparse.ArgumentParser()
  561 +
  562 + parser.add_argument('-csv', action='store', default=None, dest='scenes_csv', help='Path to the csv containing all the scenes infos')
  563 + parser.add_argument('-s', action='store', default='HOT016', dest='pcc_sub_dir', help='Name of the PCC subdir (e.g. HOT016)')
  564 + parser.add_argument('-case', action='store', default=2, dest='cas_alcd', help='Number of the ALCD case (1, 2 or 3)')
  565 + results = parser.parse_args()
  566 +
419 567 paths_configuration = json.load(open(op.join('..', 'paths_configuration.json')))
420 568 comparison_parameters = json.load(open(op.join('parameters_files','comparison_parameters.json')))
421 569  
  570 +
  571 + if results.scenes_csv != None:
  572 + scenes_csv = results.scenes_csv
  573 + else:
  574 + scenes_csv = '../Tools/all_scenes.csv'
  575 +
  576 + pcc_sub_dir = results.pcc_sub_dir
  577 + cas_alcd = int(results.cas_alcd)
  578 +
  579 + data_maja, data_sen2cor, data_fmask = make_table_metrics(paths_configuration, comparison_parameters, scenes_csv,
  580 + pcc_sub_dir = pcc_sub_dir, excluded = [], cas_alcd = cas_alcd,
  581 + print_all_stats = True, file_suffix = pcc_sub_dir)
  582 +
  583 + out_fig_name = 'stats_synthesis_{}_cas{}.png'.format(pcc_sub_dir, cas_alcd)
  584 +
  585 + plot_all_metrics_simplified(data_maja, data_sen2cor, data_fmask, out_fig_name)
  586 +
  587 +
  588 +
  589 + return
  590 +
422 591 locations = ['Arles', 'Gobabeb', 'Ispra', 'Marrakech', 'Mongu', 'Orleans', 'Pretoria', 'RailroadValley',
423 592 'Munich', 'Alta_Floresta_Brazil']
424 593 excluded = []
... ... @@ -445,7 +614,7 @@ def main():
445 614 #~ cas_alcd = cas_alcd)
446 615  
447 616 file_suffix = 'HOT17'
448   - make_table_metrics(paths_configuration, comparison_parameters, locations, alcd_algo, excluded = excluded, maja_erode = maja_erode, cas_alcd = cas_alcd, file_suffix = file_suffix)
  617 +
449 618  
450 619  
451 620  
... ...
Tools/.~lock.all_scenes.csv# 0 → 100644
... ... @@ -0,0 +1 @@
  1 +,baetensl,s2calc.cesbio.cnes.fr,08.10.2018 10:24,file:///mnt/data/home/baetensl/.config/libreoffice/4;
0 2 \ No newline at end of file
... ...
Tools/all_scenes.csv 0 → 100644
... ... @@ -0,0 +1,33 @@
  1 +Name,Location,Clear date,Cloudy date,Tile
  2 +Arles_20170917,Arles,20170920,20170917,31TFJ
  3 +Arles_20171002,Arles,20171005,20171002,31TFJ
  4 +Arles_20171221,Arles,20171224,20171221,31TFJ
  5 +Gobabeb_20161221,Gobabeb,20161231,20161221,33KWP
  6 +Gobabeb_20170909,Gobabeb,20170914,20170909,33KWP
  7 +Gobabeb_20171014,Gobabeb,20171019,20171014,33KWP
  8 +Gobabeb_20180209,Gobabeb,20180214,20180209,33KWP
  9 +Ispra_20170815,Ispra,20170820,20170815,32TMR
  10 +Ispra_20171009,Ispra,20171014,20171009,32TMR
  11 +Ispra_20171111,Ispra,20171116,20171111,32TMR
  12 +Marrakech_20160417,Marrakech,20160427,20160417,29RPQ
  13 +Marrakech_20170621,Marrakech,20170701,20170621,29RPQ
  14 +Marrakech_20171218,Marrakech,20171223,20171218,29RPQ
  15 +Mongu_20161112,Mongu,20161202,20161112,34LGJ
  16 +Mongu_20170804,Mongu,20170809,20170804,34LGJ
  17 +Mongu_20171013,Mongu,20171018,20171013,34LGJ
  18 +Orleans_20170516,Orleans,20170526,20170516,31UDP
  19 +Orleans_20170819,Orleans,20170829,20170819,31UDP
  20 +Orleans_20180218,Orleans,20180225,20180218,31UDP
  21 +Pretoria_20170313,Pretoria,20170323,20170313,35JPM
  22 +Pretoria_20170820,Pretoria,20170825,20170820,35JPM
  23 +Pretoria_20171014,Pretoria,20171019,20171014,35JPM
  24 +Pretoria_20171213,Pretoria,20171218,20171213,35JPM
  25 +RailroadValley_20170501,RailroadValley,20170504,20170501,11SPC
  26 +RailroadValley_20170827,RailroadValley,20170901,20170827,11SPC
  27 +RailroadValley_20180213,RailroadValley,20180218,20180213,11SPC
  28 +Munich_20180422,Munich,20180427,20180422,32UPU
  29 +Munich_20180424,Munich,20180427,20180424,32UPU
  30 +Alta_Floresta_Brazil_20180505,Alta_Floresta_Brazil,20180510,20180505,21LWK
  31 +Alta_Floresta_Brazil_20180609,Alta_Floresta_Brazil,20180624,20180609,21LWK
  32 +Alta_Floresta_Brazil_20180714,Alta_Floresta_Brazil,20180724,20180714,21LWK
  33 +Alta_Floresta_Brazil_20180813,Alta_Floresta_Brazil,20180823,20180813,21LWK
... ...
Tools/create_pcc_launch.py
... ... @@ -27,14 +27,28 @@ def open_csv(csv_path):
27 27 print(cloudy_dates)
28 28 return locations, clear_dates, cloudy_dates
29 29  
30   -def create_pcc_launch(locations, dates, out_file, serial = False, sub_data_dir = ''):
  30 +def create_pcc_launch(locations, dates, out_file_dir, pcc_code_dir, serial = False, sub_data_dir = None,
  31 + maja_version = None, maja_dir_name = None):
31 32 commands = []
32 33 for i in range(len(locations)):
33   - commands.append("cd /mnt/data/home/baetensl/clouds_detection_git/PCC\n")
34   - if sub_data_dir == '':
35   - commands.append("python all_run_pcc.py -l {} -d {}\n".format(locations[i], dates[i]))
36   - else:
37   - commands.append("python all_run_pcc.py -l {} -d {} -s {}\n".format(locations[i], dates[i], sub_data_dir))
  34 + #~ commands.append("cd /mnt/data/home/baetensl/clouds_detection_git/PCC\n")
  35 + commands.append("cd {}\n".format(pcc_code_dir))
  36 + additional_parameters = ''
  37 + if sub_data_dir != None:
  38 + additional_parameters += ' -s {}'.format(sub_data_dir)
  39 + if maja_version != None:
  40 + additional_parameters += ' -mver {}'.format(maja_version)
  41 + if maja_dir_name != None:
  42 + additional_parameters += ' -mdir {}'.format(maja_dir_name)
  43 +
  44 + pcc_command = "python all_run_pcc.py -l {} -d {}{}\n".format(locations[i], dates[i], additional_parameters)
  45 + commands.append(pcc_command)
  46 +
  47 + if not op.exists(out_file_dir):
  48 + os.makedirs(out_file_dir)
  49 + print(out_file_dir + ' created')
  50 +
  51 + out_file = op.join(out_file_dir, 'launch_pcc')
38 52  
39 53 # write shell files to launch the PEPS download
40 54 if not serial: #create multiple .sh to allow parallel computing
... ... @@ -63,19 +77,26 @@ def main():
63 77 parser = argparse.ArgumentParser()
64 78  
65 79 parser.add_argument('-csv', action='store', default=None, dest='csv_path', help='CSV path containing the locations and dates')
66   - parser.add_argument('-s', action='store', default='', dest='sub_data_dir', help='Sub dir name for the PCC')
  80 + parser.add_argument('-s', action='store', default=None, dest='sub_data_dir', help='Sub dir name for the PCC')
  81 + parser.add_argument('-mver', action='store', default=None, dest='maja_version', help='MAJA version')
  82 + parser.add_argument('-mdir', action='store', default=None, dest='maja_dir_name', help='MAJA sub dir name')
  83 +parser.add_argument('-pcc', action='store', default='/mnt/data/home/baetensl/clouds_detection_git/PCC', dest='pcc_code_dir', help='Path of the dir where the PCC codes are')
67 84  
68 85 results = parser.parse_args()
69 86 csv_path = results.csv_path
70 87 sub_data_dir = results.sub_data_dir
  88 + maja_version = results.maja_version
  89 + maja_dir_name = results.maja_dir_name
  90 + pcc_code_dir = results.pcc_code_dir
71 91  
72 92 if csv_path == None:
73   - csv_path = '../Various_data/all_scenes.csv'
  93 + csv_path = 'all_scenes.csv'
74 94  
75 95 locations, clear_dates, cloudy_dates = open_csv(csv_path)
76 96  
77   - out_file_dir = 'pcc_launch/launch_pcc'
78   - create_pcc_launch(locations, cloudy_dates, out_file_dir, serial = True, sub_data_dir = sub_data_dir)
  97 + out_file_dir = 'pcc_launch'
  98 + create_pcc_launch(locations, cloudy_dates, out_file_dir, pcc_code_dir = pcc_code_dir, serial = True, sub_data_dir = sub_data_dir,
  99 + maja_version = maja_version, maja_dir_name = maja_dir_name)
79 100  
80 101  
81 102  
... ...
Tools/pcc_launch/launch_pcc_0.sh
1 1 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Arles -d 20170917
  2 +python all_run_pcc.py -l Arles -d 20170917 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
3 3 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Arles -d 20171002
  4 +python all_run_pcc.py -l Arles -d 20171002 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
5 5 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
6   -python all_run_pcc.py -l Arles -d 20171221
  6 +python all_run_pcc.py -l Arles -d 20171221 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
7 7 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
8   -python all_run_pcc.py -l Gobabeb -d 20161221
  8 +python all_run_pcc.py -l Gobabeb -d 20161221 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
... ...
Tools/pcc_launch/launch_pcc_1.sh
1 1 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Gobabeb -d 20170909
  2 +python all_run_pcc.py -l Gobabeb -d 20170909 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
3 3 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Gobabeb -d 20171014
  4 +python all_run_pcc.py -l Gobabeb -d 20171014 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
5 5 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
6   -python all_run_pcc.py -l Gobabeb -d 20180209
  6 +python all_run_pcc.py -l Gobabeb -d 20180209 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
7 7 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
8   -python all_run_pcc.py -l Ispra -d 20170815
  8 +python all_run_pcc.py -l Ispra -d 20170815 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
... ...
Tools/pcc_launch/launch_pcc_2.sh
1 1 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Ispra -d 20171009
  2 +python all_run_pcc.py -l Ispra -d 20171009 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
3 3 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Ispra -d 20171111
  4 +python all_run_pcc.py -l Ispra -d 20171111 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
5 5 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
6   -python all_run_pcc.py -l Marrakech -d 20160417
  6 +python all_run_pcc.py -l Marrakech -d 20160417 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
7 7 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
8   -python all_run_pcc.py -l Marrakech -d 20170621
  8 +python all_run_pcc.py -l Marrakech -d 20170621 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
... ...
Tools/pcc_launch/launch_pcc_3.sh
1 1 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Marrakech -d 20171218
  2 +python all_run_pcc.py -l Marrakech -d 20171218 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
3 3 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Mongu -d 20161112
  4 +python all_run_pcc.py -l Mongu -d 20161112 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
5 5 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
6   -python all_run_pcc.py -l Mongu -d 20170804
  6 +python all_run_pcc.py -l Mongu -d 20170804 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
7 7 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
8   -python all_run_pcc.py -l Mongu -d 20171013
  8 +python all_run_pcc.py -l Mongu -d 20171013 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
... ...
Tools/pcc_launch/launch_pcc_4.sh
1 1 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Orleans -d 20170516
  2 +python all_run_pcc.py -l Orleans -d 20170516 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
3 3 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Orleans -d 20170819
  4 +python all_run_pcc.py -l Orleans -d 20170819 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
5 5 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
6   -python all_run_pcc.py -l Orleans -d 20180218
  6 +python all_run_pcc.py -l Orleans -d 20180218 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
7 7 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
8   -python all_run_pcc.py -l Pretoria -d 20170313
  8 +python all_run_pcc.py -l Pretoria -d 20170313 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
... ...
Tools/pcc_launch/launch_pcc_5.sh
1 1 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Pretoria -d 20170820
  2 +python all_run_pcc.py -l Pretoria -d 20170820 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
3 3 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Pretoria -d 20171014
  4 +python all_run_pcc.py -l Pretoria -d 20171014 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
5 5 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
6   -python all_run_pcc.py -l Pretoria -d 20171213
  6 +python all_run_pcc.py -l Pretoria -d 20171213 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
7 7 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
8   -python all_run_pcc.py -l RailroadValley -d 20170501
  8 +python all_run_pcc.py -l RailroadValley -d 20170501 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
... ...
Tools/pcc_launch/launch_pcc_6.sh
1 1 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Munich -d 20180422
  2 +python all_run_pcc.py -l RailroadValley -d 20170827 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
3 3 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Munich -d 20180424
  4 +python all_run_pcc.py -l RailroadValley -d 20180213 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
  5 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  6 +python all_run_pcc.py -l Munich -d 20180422 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
  7 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  8 +python all_run_pcc.py -l Munich -d 20180424 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
... ...
Tools/pcc_launch/launch_pcc_7.sh
1 1 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180505
  2 +python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180505 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
3 3 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180609
  4 +python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180609 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
  5 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  6 +python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180714 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
  7 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  8 +python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180813 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
... ...
Tools/pcc_launch/launch_pcc_8.sh
... ... @@ -1,4 +0,0 @@
1   -cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180714
3   -cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180813
Tools/pcc_launch/launch_pcc_all.sh
1 1 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
2   -python all_run_pcc.py -l Arles -d 20170917 -s BLOU
  2 +python all_run_pcc.py -l Arles -d 20170917 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
3 3 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
4   -python all_run_pcc.py -l Arles -d 20171002 -s BLOU
  4 +python all_run_pcc.py -l Arles -d 20171002 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
5 5 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
6   -python all_run_pcc.py -l Arles -d 20171221 -s BLOU
  6 +python all_run_pcc.py -l Arles -d 20171221 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
7 7 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
8   -python all_run_pcc.py -l Gobabeb -d 20161221 -s BLOU
  8 +python all_run_pcc.py -l Gobabeb -d 20161221 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
9 9 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
10   -python all_run_pcc.py -l Gobabeb -d 20170909 -s BLOU
  10 +python all_run_pcc.py -l Gobabeb -d 20170909 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
11 11 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
12   -python all_run_pcc.py -l Gobabeb -d 20171014 -s BLOU
  12 +python all_run_pcc.py -l Gobabeb -d 20171014 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
13 13 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
14   -python all_run_pcc.py -l Gobabeb -d 20180209 -s BLOU
  14 +python all_run_pcc.py -l Gobabeb -d 20180209 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
15 15 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
16   -python all_run_pcc.py -l Ispra -d 20170815 -s BLOU
  16 +python all_run_pcc.py -l Ispra -d 20170815 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
17 17 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
18   -python all_run_pcc.py -l Ispra -d 20171009 -s BLOU
  18 +python all_run_pcc.py -l Ispra -d 20171009 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
19 19 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
20   -python all_run_pcc.py -l Ispra -d 20171111 -s BLOU
  20 +python all_run_pcc.py -l Ispra -d 20171111 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
21 21 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
22   -python all_run_pcc.py -l Marrakech -d 20160417 -s BLOU
  22 +python all_run_pcc.py -l Marrakech -d 20160417 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
23 23 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
24   -python all_run_pcc.py -l Marrakech -d 20170621 -s BLOU
  24 +python all_run_pcc.py -l Marrakech -d 20170621 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
25 25 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
26   -python all_run_pcc.py -l Marrakech -d 20171218 -s BLOU
  26 +python all_run_pcc.py -l Marrakech -d 20171218 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
27 27 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
28   -python all_run_pcc.py -l Mongu -d 20161112 -s BLOU
  28 +python all_run_pcc.py -l Mongu -d 20161112 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
29 29 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
30   -python all_run_pcc.py -l Mongu -d 20170804 -s BLOU
  30 +python all_run_pcc.py -l Mongu -d 20170804 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
31 31 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
32   -python all_run_pcc.py -l Mongu -d 20171013 -s BLOU
  32 +python all_run_pcc.py -l Mongu -d 20171013 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
33 33 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
34   -python all_run_pcc.py -l Orleans -d 20170516 -s BLOU
  34 +python all_run_pcc.py -l Orleans -d 20170516 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
35 35 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
36   -python all_run_pcc.py -l Orleans -d 20170819 -s BLOU
  36 +python all_run_pcc.py -l Orleans -d 20170819 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
37 37 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
38   -python all_run_pcc.py -l Orleans -d 20180218 -s BLOU
  38 +python all_run_pcc.py -l Orleans -d 20180218 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
39 39 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
40   -python all_run_pcc.py -l Pretoria -d 20170313 -s BLOU
  40 +python all_run_pcc.py -l Pretoria -d 20170313 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
41 41 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
42   -python all_run_pcc.py -l Pretoria -d 20170820 -s BLOU
  42 +python all_run_pcc.py -l Pretoria -d 20170820 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
43 43 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
44   -python all_run_pcc.py -l Pretoria -d 20171014 -s BLOU
  44 +python all_run_pcc.py -l Pretoria -d 20171014 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
45 45 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
46   -python all_run_pcc.py -l Pretoria -d 20171213 -s BLOU
  46 +python all_run_pcc.py -l Pretoria -d 20171213 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
47 47 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
48   -python all_run_pcc.py -l RailroadValley -d 20170501 -s BLOU
  48 +python all_run_pcc.py -l RailroadValley -d 20170501 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
49 49 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
50   -python all_run_pcc.py -l RailroadValley -d 20170827 -s BLOU
  50 +python all_run_pcc.py -l RailroadValley -d 20170827 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
51 51 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
52   -python all_run_pcc.py -l RailroadValley -d 20180213 -s BLOU
  52 +python all_run_pcc.py -l RailroadValley -d 20180213 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
53 53 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
54   -python all_run_pcc.py -l Munich -d 20180422 -s BLOU
  54 +python all_run_pcc.py -l Munich -d 20180422 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
55 55 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
56   -python all_run_pcc.py -l Munich -d 20180424 -s BLOU
  56 +python all_run_pcc.py -l Munich -d 20180424 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
57 57 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
58   -python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180505 -s BLOU
  58 +python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180505 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
59 59 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
60   -python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180609 -s BLOU
  60 +python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180609 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
61 61 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
62   -python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180714 -s BLOU
  62 +python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180714 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
63 63 cd /mnt/data/home/baetensl/clouds_detection_git/PCC
64   -python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180813 -s BLOU
  64 +python all_run_pcc.py -l Alta_Floresta_Brazil -d 20180813 -s HOT0016 -mver 3 -mdir MAJA_3_1_S2AS2B_HOT016
... ...
Tools/pcc_launchze/launch_pcc_0.sh 0 → 100644
... ... @@ -0,0 +1,8 @@
  1 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  2 +python all_run_pcc.py -l Arles -d 20170917
  3 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  4 +python all_run_pcc.py -l Arles -d 20171002
  5 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  6 +python all_run_pcc.py -l Arles -d 20171221
  7 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  8 +python all_run_pcc.py -l Gobabeb -d 20161221
... ...
Tools/pcc_launchze/launch_pcc_1.sh 0 → 100644
... ... @@ -0,0 +1,8 @@
  1 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  2 +python all_run_pcc.py -l Gobabeb -d 20170909
  3 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  4 +python all_run_pcc.py -l Gobabeb -d 20171014
  5 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  6 +python all_run_pcc.py -l Gobabeb -d 20180209
  7 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  8 +python all_run_pcc.py -l Ispra -d 20170815
... ...
Tools/pcc_launchze/launch_pcc_2.sh 0 → 100644
... ... @@ -0,0 +1,8 @@
  1 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  2 +python all_run_pcc.py -l Ispra -d 20171009
  3 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  4 +python all_run_pcc.py -l Ispra -d 20171111
  5 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  6 +python all_run_pcc.py -l Marrakech -d 20160417
  7 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  8 +python all_run_pcc.py -l Marrakech -d 20170621
... ...
Tools/pcc_launchze/launch_pcc_3.sh 0 → 100644
... ... @@ -0,0 +1,8 @@
  1 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  2 +python all_run_pcc.py -l Marrakech -d 20171218
  3 +cd /mnt/data/home/baetensl/clouds_detection_git/PCC
  4 +python all_run_pcc.py -l Mongu -d 20161112