Commit 747c1fe24cc0c70e799677299a68de448a95af6c

Authored by Grizonnet Manuel (CNES)
2 parents 1e43d2cf 668e4364
Exists in develop and in 2 other branches master, utils

Merge branch 'develop'

CMakeLists.txt
... ... @@ -8,10 +8,29 @@ include_directories(src)
8 8  
9 9 find_package(GDAL REQUIRED)
10 10  
  11 +
11 12 if(NOT GDAL_FOUND)
12 13 message(FATAL_ERROR "Cannot find GDAL. Set GDAL_INCLUDE_DIR and GDAL_LIBRARY")
13 14 endif()
14 15  
  16 +if (GDAL_CONFIG)
  17 + ## extract gdal version
  18 + exec_program(${GDAL_CONFIG}
  19 + ARGS --version
  20 + OUTPUT_VARIABLE GDAL_VERSION )
  21 + string(REGEX REPLACE "([0-9]+)\\.([0-9]+)\\.([0-9]+)" "\\1" GDAL_VERSION_MAJOR "${GDAL_VERSION}")
  22 + string(REGEX REPLACE "([0-9]+)\\.([0-9]+)\\.([0-9]+)" "\\2" GDAL_VERSION_MINOR "${GDAL_VERSION}")
  23 +
  24 + #MESSAGE("DBG GDAL_VERSION ${GDAL_VERSION}")
  25 + #MESSAGE("DBG GDAL_VERSION_MAJOR ${GDAL_VERSION_MAJOR}")
  26 + #MESSAGE("DBG GDAL_VERSION_MINOR ${GDAL_VERSION_MINOR}")
  27 +
  28 + # check for gdal version
  29 + if (GDAL_VERSION_MAJOR LESS 2)
  30 + message (FATAL_ERROR "GDAL version is too old (${GDAL_VERSION}). Use 2.0 or higher.")
  31 + endif ()
  32 +endif()
  33 +
15 34 #Set python home (needed on the cnes cluster)
16 35 set(PYTHON_INCLUDE_DIRS $ENV{PYTHONHOME}/include)
17 36  
... ...
README.md
... ... @@ -36,10 +36,10 @@ runLis.sh param.json
36 36  
37 37 * COMPO: RGB composition with snow mask
38 38 * SNOW_ALL: Binary mask of snow and clouds.
39   - * 1st bit: Snow detected from pass1
40   - * 2nd bit: Snow detected from pass2
41   - * 3rd bit: Clouds detected from pass1
42   - * 4th bit: Clouds refined from pass2
  39 + * 1st bit: Snow mask after pass1
  40 + * 2nd bit: Snow mask after pass2
  41 + * 3rd bit: Clouds detected at pass0
  42 + * 4th bit: Clouds refined at pass0
43 43  
44 44 For example, if you want to get the snow from pass1 and the clouds detected from pass1, you need to do:
45 45 ````
... ... @@ -72,11 +72,15 @@ Code to generate CES Neige products on Theia platforms
72 72  
73 73 lis dependencies:
74 74  
75   -GDAL >=1.9
  75 +GDAL >=2.0
76 76 OTB >= 5.0
77 77 Boost-Python
78 78 Python interpreter >= 2.7
79 79 Python libs >= 2.7
  80 +Python packages:
  81 +numpy
  82 +lxml
  83 +matplotlib
80 84  
81 85 GDAL itself depends on a number of other libraries provided by most major operating systems and also depends on the non-standard GEOS and PROJ4 libraries. GDAL Python bindings are also required
82 86  
... ...
config/param_cloudremoval_template.json
... ... @@ -18,12 +18,6 @@
18 18 "s1":true,
19 19 "s2":true,
20 20 "s3":true,
21   - "s4":true,
22   - "s5":true
23   - },
24   - "parameters":{
25   - "hsMin":0,
26   - "hsMax":0,
27   - "windowSize":0
  21 + "s4":true
28 22 }
29 23 }
... ...
config/param_full_Landsat8_template.json
... ... @@ -2,7 +2,7 @@
2 2 {
3 3 "general":{
4 4 "pout":"outputdir",
5   - "shadow_value":64,
  5 + "shadow_value":32,
6 6 "ram":1024,
7 7 "nbThreads":1,
8 8 "mode":"landsat",
... ... @@ -18,7 +18,7 @@
18 18 },
19 19 "cloud_mask":{
20 20 "rf":8,
21   - "rRed_darkcloud":650,
  21 + "rRed_darkcloud":500,
22 22 "rRed_backtocloud":100
23 23 },
24 24 "snow":{
... ...
config/param_s2.json
1 1 {
2 2 "general":{
3 3 "pout":"/mnt/data/home/grizonnetm/temporary/s2",
4   - "shadow_value":64,
  4 + "shadow_value":32,
5 5 "ram":1024,
6 6 "mode":"s2",
7 7 "generate_vector":false
... ... @@ -13,7 +13,7 @@
13 13 },
14 14 "cloud_mask":{
15 15 "rf":8,
16   - "rRed_darkcloud":650,
  16 + "rRed_darkcloud":500,
17 17 "rRed_backtocloud":100
18 18 },
19 19 "snow":{
... ...
config/param_test_s2.json 0 → 100644
... ... @@ -0,0 +1,46 @@
  1 +{
  2 + "general":{
  3 + "pout":"/home/klempkat/let-it-snow/muscate/output",
  4 + "nodata":-10000,
  5 + "ram":1024,
  6 + "nb_threads":1,
  7 + "generate_vector":false,
  8 + "preprocessing":false,
  9 + "log":true
  10 + },
  11 + "inputs":{
  12 + "green_band": {
  13 + "path": "/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B3.tif",
  14 + "noBand": 1
  15 + },
  16 + "red_band": {
  17 + "path": "/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B4.tif",
  18 + "noBand": 1
  19 + },
  20 + "swir_band": {
  21 + "path": "/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B11.tif",
  22 + "noBand": 1
  23 + },
  24 + "dem":"/home/grizonnetm/Data-Neige/MUSCATE/MNT_S2_N2/S2__TEST_AUX_REFDE2_T29RNQ_0001_1.0/S2__TEST_AUX_REFDE2_T29RNQ_0001/S2__TEST_AUX_REFDE2_T29RNQ_0001.DBL.DIR/S2__TEST_AUX_REFDE2_T29RNQ_0001_ALT_R2.TIF",
  25 + "cloud_mask":"/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/MASKS/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_CLM_R2.tif"
  26 + },
  27 + "cloud":
  28 + {
  29 + "shadow_mask":64,
  30 + "all_cloud_mask":0,
  31 + "high_cloud_mask":32,
  32 + "rf":8,
  33 + "red_darkcloud":650,
  34 + "red_backtocloud":100
  35 + },
  36 + "snow":{
  37 + "dz":100,
  38 + "ndsi_pass1":0.4,
  39 + "red_pass1":200,
  40 + "ndsi_pass2":0.15,
  41 + "red_pass2":120,
  42 + "fsnow_lim":0.1,
  43 + "fsnow_total_lim":0.001
  44 + }
  45 +}
  46 +
... ...
hpc/run_cloud_removal_cluster.sh 0 → 100644
... ... @@ -0,0 +1,122 @@
  1 +#!/bin/bash
  2 +#Script launching LIS cloud removal on linux3-ci
  3 +#
  4 +#Please set up USER CONFIG for your system before launching this script
  5 +######################USER CONFIG####################################
  6 +#####################################################################
  7 +#lis app
  8 +lis_app=$HOME/lis/run_cloud_removal.py
  9 +#json template
  10 +lis_config=$HOME/lis/config/param_cloudremoval_template.json
  11 +#path where pbs script will be generated
  12 +lis_job_script_PBS=$HOME/lis/pbs/lis_job_cr.pbs
  13 +#path where config will be generated
  14 +lis_config_list=$HOME/lis/config/config_list_cr.conf
  15 +#pbs log
  16 +lis_log=$HOME/lis/log
  17 +#IO directories
  18 +data_input=$DATACI/test_cloudremoval/input
  19 +data_output=$DATACI/test_cloudremoval/output
  20 +#tiles to compute
  21 +tiles="N2A_EcrinsFranceD0000B0000"
  22 +stats=false
  23 +hsmin=2500
  24 +hsmax=3700
  25 +#####################################################################
  26 +
  27 +echo "Generating config list..."
  28 +rm $lis_config_list
  29 +tiles_nb=0
  30 +for tile in $tiles
  31 +do
  32 +pimg=$data_input/$tile
  33 +inputdem=$data_input/SRTM/$tile/$tile.tif
  34 +
  35 +imgarr=($pimg/*)
  36 +imgnb=$(find $pimg -mindepth 1 -maxdepth 1 -type d | wc -l)
  37 +slicemax=$(($imgnb-2))
  38 +
  39 +for i in `seq 2 $slicemax`
  40 +do
  41 + echo "$tile $inputdem ${imgarr[$i-2]} ${imgarr[$i-1]} ${imgarr[$i]} ${imgarr[$i+1]} ${imgarr[$i+2]}" >> $lis_config_list
  42 + ((tiles_nb++))
  43 +done
  44 +done
  45 +
  46 +echo "Done"
  47 +echo "Number of images to compute: $tiles_nb"
  48 +
  49 +echo "Generating pbs script..."
  50 +#Create pbs job script
  51 +cat <<EOF > $lis_job_script_PBS
  52 +#!/bin/bash
  53 +#PBS -N lis
  54 +#PBS -l select=1:ncpus=1
  55 +#PBS -l walltime=00:45:00
  56 +#PBS -o $lis_log
  57 +#PBS -e $lis_log
  58 +#PBS -J 1-${tiles_nb}:1
  59 +
  60 +tile=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f1)
  61 +dempath=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f2)
  62 +m2path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f3)
  63 +m1path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f4)
  64 +t0path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f5)
  65 +p1path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f6)
  66 +p2path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f7)
  67 +
  68 +#copy input data to tmp
  69 +#tmp directories
  70 +rm -r \$TMPCI/\$(basename \$t0path)_LIS_cr
  71 +data_tmp=\$TMPCI/\$(basename \$t0path)_LIS_cr
  72 +data_input_tmp=\$data_tmp/input
  73 +data_output_tmp=\$data_tmp/output
  74 +
  75 +mkdir -p \$data_input_tmp/\$tile/\$(basename \$t0path)
  76 +mkdir -p \$data_input_tmp/SRTM/\$tile
  77 +
  78 +cp -r \$t0path/* \$data_input_tmp/\$tile/\$(basename \$t0path)
  79 +cp \$dempath \$data_input_tmp/SRTM/\$tile/\$(basename \$dempath)
  80 +
  81 +t0path=\$data_input_tmp/\$tile/\$(basename \$t0path)
  82 +dempath=\$data_input_tmp/SRTM/\$tile/\$(basename \$dempath)
  83 +
  84 +#create json
  85 +config=\$t0path.json
  86 +cp $lis_config \$config
  87 +
  88 +#modify json
  89 +m2img=\$(find \$m2path -name *SEB.TIF)
  90 +m1img=\$(find \$m1path -name *SEB.TIF)
  91 +t0img=\$(find \$t0path -name *SEB.TIF)
  92 +p1img=\$(find \$p1path -name *SEB.TIF)
  93 +p2img=\$(find \$p2path -name *SEB.TIF)
  94 +pout=\$data_output_tmp/\$tile/\$(basename \$t0path)
  95 +mkdir -p \$pout
  96 +sed -i -e "s|outputdir|\$pout|g" \$config
  97 +sed -i -e "s|m2path|\$m2img|g" \$config
  98 +sed -i -e "s|m1path|\$m1img|g" \$config
  99 +sed -i -e "s|t0path|\$t0img|g" \$config
  100 +sed -i -e "s|p1path|\$p1img|g" \$config
  101 +sed -i -e "s|p2path|\$p2img|g" \$config
  102 +sed -i -e "s|dempath|\$dempath|g" \$config
  103 +sed -i -e "s|hsmax|$hsmax|g" \$config
  104 +sed -i -e "s|hsmin|$hsmin|g" \$config
  105 +
  106 +#run cloud removal
  107 +python $lis_app \$config
  108 +
  109 +#copy output files
  110 +mkdir -p $data_output/\$tile
  111 +cp -r \$pout $data_output/\$tile
  112 +cp \$config $data_output/\$tile
  113 +
  114 +EOF
  115 +
  116 +echo "Done"
  117 +
  118 +#lauch qsub
  119 +echo "Launching qsub..."
  120 +id_job_lis=$(qsub $lis_job_script_PBS)
  121 +echo "Done"
  122 +echo "LIS cr ID job: $id_job_lis"
... ...
python/s2snow/cloud_removal.py
... ... @@ -17,6 +17,7 @@ def get_raster_as_array(raster_file_name):
17 17 band = dataset.GetRasterBand(1)
18 18 array = band.ReadAsArray(0, 0, wide, high)
19 19 return array, dataset
  20 +
20 21 def set_array_as_raster(array, dataset, output_path):
21 22 high, wide = array.shape
22 23 output = gdal.GetDriverByName('GTiff').Create(output_path, wide, high, 1 ,gdal.GDT_Byte)
... ... @@ -26,38 +27,66 @@ def set_array_as_raster(array, dataset, output_path):
26 27 output.SetGeoTransform(dataset.GetGeoTransform())
27 28 output.SetProjection(dataset.GetProjection())
28 29 output = None
  30 +def compute_HSmin(image_path, dem_path):
  31 + array_image, dataset_image = get_raster_as_array(image_path)
  32 + array_dem, dataset_dem = get_raster_as_array(dem_path)
29 33  
30   -def step1(m1_path, t0_path, p1_path, output_path, ram):
31   - #S(y,x,t) = 1 if (S(y,x,t-1) = 1 and S(y,x,t+1) = 1)
32   - call(["otbcli_BandMath","-ram", str(ram), "-il", m1_path, t0_path, p1_path, "-out", output_path, "-exp", "im2b1==205?((im1b1==100&&im3b1==100)?100:im2b1):im2b1"])
  34 + return array_dem[array_image == 100].min()
  35 +
  36 +def compute_HSmax(image_path, dem_path):
  37 + array_image, dataset_image = get_raster_as_array(image_path)
  38 + array_dem, dataset_dem = get_raster_as_array(dem_path)
33 39  
34   -def step2(m2_path, m1_path, t0_path, p1_path, p2_path, output_path, ram):
35   - #S(y,x,t) = 1 if (S(y,x,t-2) = 1 and S(y,x,t+1) = 1)
36   - call(["otbcli_BandMath","-ram", str(ram), "-il", m2_path, t0_path, p1_path, "-out", output_path, "-exp", "im2b1==205?((im1b1==100&&im3b1==100)?100:im2b1):im2b1"])
37   - #S(y,x,t) = 1 if (S(y,x,t-1) = 1 and S(y,x,t+2) = 1)
38   - call(["otbcli_BandMath","-ram", str(ram), "-il", m1_path, output_path, p2_path, "-out", output_path, "-exp", "im2b1==205?((im1b1==100&&im3b1==100)?100:im2b1):im2b1"])
  40 + elev_max_nosnow = array_dem[array_image == 0].max()
  41 + return array_dem[(array_image == 100) & (array_dem > elev_max_nosnow)].min()
39 42  
40   -def step3(t0_path, dem_path, hs_min, hs_max, output_path, ram):
41   - #S(y,x,t) = 1 if (H(x,y) < Hsmin(t))
42   - call(["otbcli_BandMath","-ram", str(ram), "-il", t0_path, dem_path, "-out", output_path, "-exp", "im1b1==205?(im2b1<"+str(hs_min)+"?0:im1b1):im1b1"])
43   -
44   - #S(y,x,t) = 1 if (H(x,y) > Hsmax(t))
45   - call(["otbcli_BandMath","-ram", str(ram), "-il", output_path, dem_path, "-out", output_path, "-exp", "im1b1==205?(im2b1>"+str(hs_max)+"?100:im1b1):im1b1"])
  43 +def compute_cloudpercent(image_path):
  44 + array_image, dataset_image = get_raster_as_array(image_path)
  45 + cloud = np.sum(array_image == 205)
  46 + tot_pix = np.sum(array_image != 254)
  47 + return (float(cloud)/float(tot_pix))*100
46 48  
47   -def step4(t0_path, output_path, window_size):
  49 +def compute_cloud(image):
  50 + array, dataset = get_raster_as_array(image)
  51 + msk_cloud = (array == 205)
  52 + return np.sum(msk_cloud)
  53 +
  54 +def step1(m2_path, m1_path, t0_path, p1_path, p2_path, output_path, ram):
  55 + #S(y,x,t) = 1 if (S(y,x,t-1) = 1 and S(y,x,t+1) = 1)
  56 + call(["otbcli_BandMath","-ram", str(ram), "-il", m1_path, t0_path, p1_path, "-out", output_path, "-exp", "im2b1==205?((im1b1==100&&im3b1==100)?100:(im1b1==0&&im3b1==0?0:im2b1)):im2b1"])
  57 + #S(y,x,t) = 1 if (S(y,x,t-2) = 1 and S(y,x,t+1) = 1)
  58 + call(["otbcli_BandMath","-ram", str(ram), "-il", m2_path, output_path, p1_path, "-out", output_path, "-exp", "im2b1==205?((im1b1==100&&im3b1==100)?100:(im1b1==0&&im3b1==0?0:im2b1)):im2b1"])
  59 + #S(y,x,t) = 1 if (S(y,x,t-1) = 1 and S(y,x,t+2) = 1)
  60 + call(["otbcli_BandMath","-ram", str(ram), "-il", m1_path, output_path, p2_path, "-out", output_path, "-exp", "im2b1==205?((im1b1==100&&im3b1==100)?100:(im1b1==0&&im3b1==0?0:im2b1)):im2b1"])
  61 +
  62 +def step2(t0_path, dem_path, output_path, ram):
  63 +
  64 + percentage_cloud = compute_cloudpercent(t0_path)
  65 + print "cloud percent : " + str(percentage_cloud)
  66 + if percentage_cloud < 30:
  67 + #S(y,x,t) = 1 if (H(x,y) < Hsmin(t))
  68 + hs_min=compute_HSmin(t0_path, dem_path)
  69 + print "hs_min: " + str(hs_min)
  70 + call(["otbcli_BandMath","-ram", str(ram), "-il", t0_path, dem_path, "-out", output_path, "-exp", "im1b1==205?(im2b1<"+str(hs_min)+"?0:im1b1):im1b1"])
  71 + hs_max=compute_HSmax(t0_path, dem_path)
  72 + print "hs_max: " + str(hs_max)
  73 + #S(y,x,t) = 1 if (H(x,y) > Hsmax(t))
  74 + call(["otbcli_BandMath","-ram", str(ram), "-il", output_path, dem_path, "-out", output_path, "-exp", "im1b1==205?(im2b1>"+str(hs_max)+"?100:im1b1):im1b1"])
  75 +
  76 +def step3(t0_path, output_path):
48 77  
49 78 # four-pixels neighboring
50   - print "Starting step 4"
  79 + print "Starting step 3"
51 80 array, dataset = get_raster_as_array(t0_path)
52 81  
53 82 #compute 4 pixel snow neighboring
54   - step4_internal(array)
  83 + step3_internal(array)
55 84  
56 85 set_array_as_raster(array, dataset, output_path)
57 86 #create file
58   - print "End of step 4"
  87 + print "End of step 3"
59 88  
60   -def step4_internal(array):
  89 +def step3_internal(array):
61 90 # Get west, north, east & south elements for [1:-1,1:-1] region of input array
62 91 W = array[1:-1,:-2]
63 92 N = array[:-2,1:-1]
... ... @@ -70,20 +99,20 @@ def step4_internal(array):
70 99 # Use the mask to set corresponding elements
71 100 array[1:-1,1:-1][mask] = 100
72 101  
73   -def step5(t0_path, dem_path, output_path, window_size):
  102 +def step4(t0_path, dem_path, output_path):
74 103 # S(y,x,t) = 1 if (S(y+k,x+k,t)(kc(-1,1)) = 1 and H(y+k,x+k)(kc(-1,1)) < H(y,x))
75   - print "Starting step 5"
  104 + print "Starting step 4"
76 105 array, dataset = get_raster_as_array(t0_path)
77 106 array_dem, dataset_dem = get_raster_as_array(dem_path)
78 107  
79 108 # step5
80   - step5_internal(array, array_dem)
  109 + step4_internal(array, array_dem)
81 110  
82 111 #create file
83 112 set_array_as_raster(array, dataset, output_path)
84   - print "End of step 5"
  113 + print "End of step 4"
85 114  
86   -def step5_internal(array, array_dem):
  115 +def step4_internal(array, array_dem):
87 116 # Get 8 neighboring pixels for raster and dem
88 117 W = array[1:-1,:-2]
89 118 NW = array[:-2,:-2]
... ... @@ -109,12 +138,6 @@ def step5_internal(array, array_dem):
109 138 # Use the mask to set corresponding elements
110 139 array[1:-1,1:-1][mask] = 100
111 140  
112   -def compute_cloud(image):
113   - array, dataset = get_raster_as_array(image)
114   - msk_cloud = (array == 205)
115   - return np.sum(msk_cloud)
116   -
117   -
118 141 def compute_stats(image, image_relative, image_reference):
119 142 array, dataset = get_raster_as_array(image)
120 143 array_relative, dataset = get_raster_as_array(image_relative)
... ... @@ -142,15 +165,20 @@ def compute_stats_internal(array, array_relative, array_reference):
142 165  
143 166 # return all the result
144 167 return cloud_elim, TRUE, FALSE, StoS, LtoL, StoL, LtoS
  168 +#Set percentage based on the cloud removal percent
145 169 def format_percent(array, total_cloud):
146 170 #TODO FACTO
147 171 stats_array_percent = np.copy(array.astype(float))
148   - stats_array_percent[:,6] = np.divide(stats_array_percent[:,6], stats_array_percent[:,2])
149   - stats_array_percent[:,5] = np.divide(stats_array_percent[:,5], stats_array_percent[:,2])
150   - stats_array_percent[:,4] = np.divide(stats_array_percent[:,4], stats_array_percent[:,1])
151   - stats_array_percent[:,3] = np.divide(stats_array_percent[:,3], stats_array_percent[:,1])
152   - stats_array_percent[:,2] = np.divide(stats_array_percent[:,2], stats_array_percent[:,0])
153   - stats_array_percent[:,1] = np.divide(stats_array_percent[:,1], stats_array_percent[:,0])
  172 + # divide by zero => value set to 0
  173 + with np.errstate(divide='ignore', invalid='ignore'):
  174 + stats_array_percent[:,6] = np.divide(stats_array_percent[:,6], stats_array_percent[:,2])
  175 + stats_array_percent[:,5] = np.divide(stats_array_percent[:,5], stats_array_percent[:,2])
  176 + stats_array_percent[:,4] = np.divide(stats_array_percent[:,4], stats_array_percent[:,1])
  177 + stats_array_percent[:,3] = np.divide(stats_array_percent[:,3], stats_array_percent[:,1])
  178 + stats_array_percent[:,2] = np.divide(stats_array_percent[:,2], stats_array_percent[:,0])
  179 + stats_array_percent[:,1] = np.divide(stats_array_percent[:,1], stats_array_percent[:,0])
  180 + stats_array_percent[~np.isfinite(stats_array_percent)] = 0 #prevents nan
  181 +
154 182 stats_array_percent[:,0] /= total_cloud
155 183 stats_array_percent[:,1] = np.multiply(stats_array_percent[:,1], stats_array_percent[:,0])
156 184 stats_array_percent[:,2] = np.multiply(stats_array_percent[:,2], stats_array_percent[:,0])
... ... @@ -197,57 +225,49 @@ def run(data):
197 225 p1_path=inputs.get("p1Path")
198 226 p2_path=inputs.get("p2Path")
199 227 dem_path=inputs.get("demPath")
200   - if stats:
201   - ref_path=inputs.get("refPath")
202   - parameters=data["parameters"]
203   - hs_min=parameters.get("hsMin")
204   - hs_max=parameters.get("hsMax")
205   - window_size=parameters.get("windowSize")
  228 + ref_path=inputs.get("refPath","norefpath")
  229 + #parameters=data["parameters"]
  230 + #hs_min=parameters.get("hsMin")
  231 + #hs_max=parameters.get("hsMax")
  232 + #window_size=parameters.get("windowSize")
206 233  
207 234 steps=data["steps"]
208 235 s1=steps.get("s1", True)
209 236 s2=steps.get("s2", True)
210 237 s3=steps.get("s3", True)
211 238 s4=steps.get("s4", True)
212   - s5=steps.get("s5", True)
213 239  
214   - stats = []
  240 + statsarr = []
215 241  
216 242 #TODO FACTO. Generic step ? arg function
217 243 latest_file_path=t0_path
218 244 if s1:
219 245 temp_output_path=op.join(output_path, "cloud_removal_output_step1.tif")
220   - step1(m1_path, latest_file_path, p1_path, temp_output_path, ram)
  246 + step1(m2_path, m1_path, latest_file_path, p1_path, p2_path, temp_output_path, ram)
221 247 if stats:
222   - stats.append(compute_stats(temp_output_path, latest_file_path, ref_path))
  248 + statsarr.append(compute_stats(temp_output_path, latest_file_path, ref_path))
223 249 latest_file_path=temp_output_path
224 250 if s2:
225 251 temp_output_path=op.join(output_path, "cloud_removal_output_step2.tif")
226   - step2(m2_path, m1_path, latest_file_path, p1_path, p2_path, temp_output_path, ram)
  252 + step2(latest_file_path, dem_path, temp_output_path, ram)
227 253 if stats:
228   - stats.append(compute_stats(temp_output_path, latest_file_path, ref_path))
  254 + statsarr.append(compute_stats(temp_output_path, latest_file_path, ref_path))
229 255 latest_file_path=temp_output_path
230 256 if s3:
231 257 temp_output_path=op.join(output_path, "cloud_removal_output_step3.tif")
232   - step3(latest_file_path, dem_path, hs_min, hs_max, temp_output_path, ram)
  258 + step3(latest_file_path, temp_output_path)
233 259 if stats:
234   - stats.append(compute_stats(temp_output_path, latest_file_path, ref_path))
  260 + statsarr.append(compute_stats(temp_output_path, latest_file_path, ref_path))
235 261 latest_file_path=temp_output_path
236 262 if s4:
237 263 temp_output_path=op.join(output_path, "cloud_removal_output_step4.tif")
238   - step4(latest_file_path, temp_output_path, window_size)
239   - if stats:
240   - stats.append(compute_stats(temp_output_path, latest_file_path, ref_path))
241   - latest_file_path=temp_output_path
242   - if s5:
243   - temp_output_path=op.join(output_path, "cloud_removal_output_step5.tif")
244   - step5(latest_file_path, dem_path, temp_output_path, window_size)
  264 + step4(latest_file_path, dem_path, temp_output_path)
245 265 if stats:
246   - stats.append(compute_stats(temp_output_path, latest_file_path, ref_path))
  266 + statsarr.append(compute_stats(temp_output_path, latest_file_path, ref_path))
247 267 latest_file_path=temp_output_path
248 268  
249 269 if stats:
250   - stats_array = np.array(stats)
  270 + stats_array = np.array(statsarr)
251 271 stats_array_percent = format_percent(stats_array, compute_cloud(t0_path))
252 272 stats_array = np.vstack([stats_array, np.sum(stats_array, axis=0)]) #add total to array
253 273 stats_array_percent = np.vstack([stats_array_percent, np.sum(stats_array_percent, axis=0)]) #add total to array
... ... @@ -260,8 +280,9 @@ def run(data):
260 280 #plot_stats(stats_array)
261 281  
262 282 # Python list supported not numpy array
  283 + statsabs = stats_array.tolist()
263 284 statsf = open('stats.json', 'w')
264   - json.dump(stats, statsf)
  285 + json.dump(statsabs, statsf)
265 286 statsf.close()
266 287  
267 288 statsp = stats_array_percent.tolist()
... ...
python/s2snow/format_output.py
... ... @@ -8,6 +8,8 @@ import datetime
8 8 from lxml import etree
9 9 from shutil import copyfile
10 10 import gdal
  11 +import gdalconst
  12 +import numpy as np
11 13  
12 14 # run subprocess and write to stdout and stderr
13 15 def call_subprocess(process_list):
... ... @@ -16,73 +18,90 @@ def call_subprocess(process_list):
16 18 print out
17 19 sys.stderr.write(err)
18 20  
  21 +def get_raster_as_array(raster_file_name):
  22 + dataset = gdal.Open(raster_file_name, gdalconst.GA_ReadOnly)
  23 + wide = dataset.RasterXSize
  24 + high = dataset.RasterYSize
  25 + band = dataset.GetRasterBand(1)
  26 + array = band.ReadAsArray(0, 0, wide, high)
  27 + return array, dataset
19 28  
20   -def format_LIS(snow_detector):
21   - path_img = snow_detector.img
22   - pout = snow_detector.path_tmp
23   - zs = snow_detector.zs
24   - snow_percent = snow_detector.snow_percent
25   - cloud_percent = snow_detector.cloud_percent
26   - ram = snow_detector.ram
27   - mode = snow_detector.mode
28   - nodata_path = snow_detector.nodata_path
  29 +def compute_cloudpercent(image_path):
  30 + array_image, dataset_image = get_raster_as_array(image_path)
  31 + cloud = np.sum(array_image == 205)
  32 + tot_pix = np.sum(array_image != 254)
  33 + return (float(cloud)/float(tot_pix))*100
29 34  
30   - if mode == "s2":
31   - #ID corresponding to the parent folder of the img
32   - product_id = op.basename(op.abspath(op.join(path_img, os.pardir)))
33   - else:
34   - #ID corresponding to the name of the img
35   - product_id = op.splitext(op.basename(path_img))[0]
  35 +def compute_snowpercent(image_path):
  36 + array_image, dataset_image = get_raster_as_array(image_path)
  37 + cloud = np.sum(array_image == 100)
  38 + tot_pix = np.sum(array_image != 254)
  39 + return (float(cloud)/float(tot_pix))*100
36 40  
37   - ext = "TIF"
38   -
39   - #TODO associate product name with let-it-snow results to make a loop
40   - code_snow_all = "_SNOW_ALL"
41   - str_snow_all = product_id+code_snow_all+"."+ext
42   - str_snow_all = str_snow_all.upper()
43   - copyfile(op.join(pout, "snow_all.tif"), op.join(pout, str_snow_all))
  41 +def format_LIS(snow_detector):
  42 + path_img = snow_detector.img
  43 + pout = snow_detector.path_tmp
  44 + zs = snow_detector.zs
  45 + ram = snow_detector.ram
  46 + mode = snow_detector.mode
  47 + nodata_path = snow_detector.nodata_path
  48 +
  49 + product_id = op.splitext(op.basename(path_img))[0]
  50 + path_products = op.join(pout, "LIS_PRODUCTS")
44 51  
45   - code_compo = "_COMPO"
46   - str_compo = product_id+code_compo+"."+ext
47   - str_compo = str_compo.upper()
48   - copyfile(op.join(pout, "composition.tif"), op.join(pout, str_compo))
49   -
50   - code_seb = "_SEB"
51   - str_seb = product_id+code_seb+"."+ext
52   - str_seb = str_seb.upper()
53   - format_SEB_values(op.join(pout, "final_mask.tif"), nodata_path, ram)
54   - copyfile(op.join(pout, "final_mask.tif"), op.join(pout, str_seb))
55   -
56   - code_seb_vec = "_SEB_VEC"
57   - for f in glob.glob(op.join(pout, "final_mask_vec.*")):
58   - extension = op.splitext(f)[1]
59   - str_seb_vec = product_id+code_seb_vec+extension
60   - str_seb_vec = str_seb_vec.upper()
61   - if extension == ".dbf":
62   - format_SEB_VEC_values(f)
63   - copyfile(f, op.join(pout, str_seb_vec))
64   -
65   - root = etree.Element("Source_Product")
66   - etree.SubElement(root, "PRODUCT_ID").text = product_id
67   - egil = etree.SubElement(root, "Global_Index_List")
68   - etree.SubElement(egil, "QUALITY_INDEX", name='ZS').text = str(zs)
69   - etree.SubElement(egil, "QUALITY_INDEX", name='SnowPercent').text = str(snow_percent)
70   - etree.SubElement(egil, "QUALITY_INDEX", name='CloudPercent').text = str(cloud_percent)
71   - et = etree.ElementTree(root)
72   - et.write(op.join(pout, "metadata.xml"), pretty_print = True)
73   - code_metadata = "_METADATA"
74   - str_metadata = product_id+code_metadata+".xml"
75   - str_metadata = str_metadata.upper()
76   - copyfile(op.join(pout, "metadata.xml"), op.join(pout, str_metadata))
  52 + if not op.exists(path_products):
  53 + os.makedirs(path_products)
  54 +
  55 + ext = "TIF"
  56 + code_snow_all = "_SNOW_ALL"
  57 + str_snow_all = product_id+code_snow_all+"."+ext
  58 + str_snow_all = str_snow_all.upper()
  59 + copyfile(op.join(pout, "snow_all.tif"), op.join(path_products, str_snow_all))
  60 +
  61 + code_seb = "_SEB"
  62 + str_seb = product_id+code_seb+"."+ext
  63 + str_seb = str_seb.upper()
  64 + format_SEB_values(op.join(pout, "final_mask.tif"), nodata_path, ram)
  65 + copyfile(op.join(pout, "final_mask.tif"), op.join(path_products, str_seb))
  66 +
  67 + code_seb_vec = "_SEB_VEC"
  68 + for f in glob.glob(op.join(pout, "final_mask_vec.*")):
  69 + extension = op.splitext(f)[1]
  70 + str_seb_vec = product_id+code_seb_vec+extension
  71 + str_seb_vec = str_seb_vec.upper()
  72 + if extension == ".dbf":
  73 + format_SEB_VEC_values(f)
  74 + copyfile(f, op.join(path_products, str_seb_vec))
  75 +
  76 + code_compo = "_COMPO"
  77 + str_compo = product_id+code_compo+".tif"
  78 + str_compo = str_compo.upper()
  79 + copyfile(op.join(pout, "composition.tif"), op.join(path_products, str_compo))
77 80  
  81 + snow_percent = compute_snowpercent(op.join(pout, "final_mask.tif"))
  82 + cloud_percent = compute_cloudpercent(op.join(pout, "final_mask.tif"))
  83 +
  84 + root = etree.Element("Source_Product")
  85 + etree.SubElement(root, "PRODUCT_ID").text = product_id
  86 + egil = etree.SubElement(root, "Global_Index_List")
  87 + etree.SubElement(egil, "QUALITY_INDEX", name='ZS').text = str(zs)
  88 + etree.SubElement(egil, "QUALITY_INDEX", name='SnowPercent').text = str(snow_percent)
  89 + etree.SubElement(egil, "QUALITY_INDEX", name='CloudPercent').text = str(cloud_percent)
  90 + et = etree.ElementTree(root)
  91 + et.write(op.join(pout, "metadata.xml"), pretty_print = True)
  92 + code_metadata = "_METADATA"
  93 + str_metadata = product_id+code_metadata+".xml"
  94 + str_metadata = str_metadata.upper()
  95 + copyfile(op.join(pout, "metadata.xml"), op.join(path_products, str_metadata))
  96 +
78 97 def format_SEB_values(path, nodata_path, ram):
79 98 call_subprocess(["otbcli_BandMath", "-il", path, "-out", path, "uint8", "-ram",str(ram), "-exp", "(im1b1==1)?100:(im1b1==2)?205:0"])
80 99 call_subprocess(["otbcli_BandMath", "-il", path, nodata_path, "-out", path, "uint8", "-ram" , str(ram), "-exp", "im2b1==1?254:im1b1"])
81   -
  100 +
82 101 def format_SEB_VEC_values(path):
83   - table = op.splitext(op.basename(path))[0]
84   - call_subprocess(["ogrinfo", path, "-sql", "ALTER TABLE "+table+" ADD COLUMN type varchar(15)"])
85   - call_subprocess(["ogrinfo", path, "-dialect", "SQLite", "-sql", "UPDATE '"+table+"' SET DN=100, type='snow' WHERE DN=1"])
86   - call_subprocess(["ogrinfo", path, "-dialect", "SQLite", "-sql", "UPDATE '"+table+"' SET DN=205, type='cloud' WHERE DN=2"])
87   - call_subprocess(["ogrinfo", path, "-dialect", "SQLite", "-sql", "UPDATE '"+table+"' SET DN=254, type='no data' WHERE DN != 100 AND DN != 205"])
  102 + table = op.splitext(op.basename(path))[0]
  103 + call_subprocess(["ogrinfo", path, "-sql", "ALTER TABLE "+table+" ADD COLUMN type varchar(15)"])
  104 + call_subprocess(["ogrinfo", path, "-dialect", "SQLite", "-sql", "UPDATE '"+table+"' SET DN=100, type='snow' WHERE DN=1"])
  105 + call_subprocess(["ogrinfo", path, "-dialect", "SQLite", "-sql", "UPDATE '"+table+"' SET DN=205, type='cloud' WHERE DN=2"])
  106 + call_subprocess(["ogrinfo", path, "-dialect", "SQLite", "-sql", "UPDATE '"+table+"' SET DN=254, type='no data' WHERE DN != 100 AND DN != 205"])
88 107  
... ...
python/s2snow/snow_detector.py
... ... @@ -27,6 +27,7 @@ from gdalconst import *
27 27 import multiprocessing
28 28 import numpy as np
29 29 import uuid
  30 +from shutil import copyfile
30 31  
31 32 # this allows GDAL to throw Python Exceptions
32 33 gdal.UseExceptions()
... ... @@ -58,15 +59,16 @@ def polygonize(input_img,input_mask,output_vec):
58 59 """Helper function to polygonize raster mask using gdal polygonize"""
59 60 call_subprocess(["gdal_polygonize.py",input_img,"-f","ESRI Shapefile","-mask",input_mask,output_vec])
60 61  
61   -def composition_RGB(input_img,output_img, nRed, nGreen, nSWIR):
  62 +def composition_RGB(input_img,output_img):
62 63 """Make a RGB composition to highlight the snow cover
63 64  
64   - input_img: multispectral Level 2 SPOT-4 (GTiff), output_img: false color
  65 + input_img: multispectral tiff, output_img: false color
65 66 composite RGB image (GTiff). nRed, nGreen and nSWIR are the indices of the red, green and
66 67 SWIR bands in the input images.
67 68  
68 69 """
69   - call_subprocess(["gdal_translate","-co","PHOTOMETRIC=RGB","-scale","0","300","-ot","Byte","-b",str(nSWIR),"-b",str(nRed),"-b",str(nGreen),input_img,output_img])
  70 + #call_subprocess(["gdal_translate","-co","PHOTOMETRIC=RGB","-scale","0","300","-ot","Byte","-b",str(nSWIR),"-b",str(nRed),"-b",str(nGreen),input_img,output_img])
  71 + call_subprocess(["otbcli_Convert", "-in", input_img, "-out", output_img, "uint8", "-type", "linear"])
70 72  
71 73 def burn_polygons_edges(input_img,input_vec):
72 74 """Burn polygon borders onto an image with the following symbology:
... ... @@ -77,44 +79,43 @@ def burn_polygons_edges(input_img,input_vec):
77 79  
78 80 """
79 81  
80   - #Save temporary file in working directory
  82 + #Save temporary file in working directory
81 83  
82   - #Retrieve directory from input vector file
83   - input_dir=os.path.dirname(input_vec)
84   - #TODO move to snowdetector class?
85   - #Get unique identifier for the temporary file
86   - unique_filename=uuid.uuid4()
  84 + #Retrieve directory from input vector file
  85 + input_dir=os.path.dirname(input_vec)
  86 + #TODO move to snowdetector class?
  87 + #Get unique identifier for the temporary file
  88 + unique_filename=uuid.uuid4()
87 89 tmp_line=op.join(input_dir,str(unique_filename))
  90 + print "tmpline: " + str(tmp_line)
88 91  
  92 + #print "gdal version " + gdal.VersionInfo.str()
  93 +
89 94 call_subprocess(["ogr2ogr","-overwrite","-nlt","MULTILINESTRING",tmp_line+".shp",input_vec])
90   - # 2) rasterize cloud and cloud shadows polygon borders in green
91   - call_subprocess(["gdal_rasterize","-b","1","-b","2","-b","3","-burn","0","-burn","255","-burn","0","-where","DN=\"2\"","-l","tmp_line",tmp_line+".shp",input_img])
92   - # 3) rasterize snow polygon borders in magenta
93   - call_subprocess(["gdal_rasterize","-b","1","-b","2","-b","3","-burn","255","-burn","0","-burn","255","-where","DN=\"1\"","-l","tmp_line",tmp_line+".shp",input_img])
94   - # 4) remove tmp_line files
95   - call_subprocess(["rm"]+glob.glob(tmp_line+"*"))
96   -
97   -def get_total_pixels(imgpath):
98   - dataset = gdal.Open(imgpath, GA_ReadOnly)
99   - #assume that snow and cloud images are of the same size
100   - total_pixels=dataset.RasterXSize*dataset.RasterYSize
101   - return total_pixels
102   -
103   -def get_total_pixels_without_nodata(nodata_mask):
104   - dataset = gdal.Open(nodata_mask, GA_ReadOnly)
105   - #assume that snow and cloud images are of the same size
106   - wide = dataset.RasterXSize
107   - high = dataset.RasterYSize
108   - band = dataset.GetRasterBand(1)
109   - array = band.ReadAsArray(0, 0, wide, high)
110   - l = list(array.flatten())
111   - return l.count(0)
  95 +
  96 + if int(gdal.VersionInfo()) >= 2000000:
  97 + print "GDAL version >= 2.0 detected. The -where statement syntax has changed in gdal."
  98 + # 2) rasterize cloud and cloud shadows polygon borders in green
  99 + call_subprocess(["gdal_rasterize","-b","1","-b","2","-b","3","-burn","0","-burn","255","-burn","0","-where",'DN=2',"-l",str(unique_filename),tmp_line+".shp",input_img])
  100 + # 3) rasterize snow polygon borders in magenta
  101 + call_subprocess(["gdal_rasterize","-b","1","-b","2","-b","3","-burn","255","-burn","0","-burn","255","-where",'DN=1',"-l",str(unique_filename),tmp_line+".shp",input_img])
  102 + else:
  103 + print "GDAL version <2."
  104 + # 2) rasterize cloud and cloud shadows polygon borders in green
  105 + call_subprocess(["gdal_rasterize","-b","1","-b","2","-b","3","-burn","0","-burn","255","-burn","0","-where","DN=\"2\"","-l",str(unique_filename),tmp_line+".shp",input_img])
  106 + # 3) rasterize snow polygon borders in magenta
  107 + call_subprocess(["gdal_rasterize","-b","1","-b","2","-b","3","-burn","255","-burn","0","-burn","255","-where","DN=\"1\"","-l",str(unique_filename),tmp_line+".shp",input_img])
  108 +
  109 + # 4) remove tmp_line files
  110 + for shp in glob.glob(tmp_line+"*"):
  111 + os.remove(shp)
112 112  
113 113 class snow_detector :
114 114 def __init__(self, data):
115 115  
116 116 self.version = VERSION
117 117 #Parse general parameters
  118 +
118 119 general=data["general"]
119 120 self.path_tmp=str(general.get("pout"))
120 121 self.ram=general.get("ram", 512)
... ... @@ -128,26 +129,106 @@ class snow_detector :
128 129 self.generate_vector=general.get("generate_vector", False)
129 130 self.do_preprocessing=general.get("preprocessing", False)
130 131 self.do_postprocessing=True
131   - self.shadow_value=general.get("shadow_value")
  132 + self.nodata=-10000 #TODO parse json if needed
132 133 #Parse cloud data
133   - cloud_mask=data["cloud_mask"]
134   - self.rf=cloud_mask.get("rf")
135   - self.rRed_darkcloud=cloud_mask.get("rRed_darkcloud")
136   - self.rRed_backtocloud=cloud_mask.get("rRed_backtocloud")
  134 + cloud=data["cloud"]
  135 + self.rf=cloud.get("rf")
  136 + self.rRed_darkcloud=cloud.get("red_darkcloud")
  137 + self.rRed_backtocloud=cloud.get("red_backtocloud")
  138 + self.shadow_mask=cloud.get("shadow_mask")
  139 + self.all_cloud_mask=cloud.get("all_cloud_mask")
  140 + self.high_cloud_mask=cloud.get("high_cloud_mask")
137 141 #Parse input parameters
138 142 inputs=data["inputs"]
139 143 if(self.do_preprocessing):
140 144 self.vrt=str(inputs.get("vrt"))
141   - self.img=str(inputs.get("image"))
  145 + #self.img=str(inputs.get("image"))
142 146 self.dem=str(inputs.get("dem"))
143 147 self.cloud_init=str(inputs.get("cloud_mask"))
  148 +
  149 + #bands paths
  150 + green_band=inputs["green_band"]
  151 + gb_path=green_band["path"]
  152 + gb_no=green_band["noBand"]
  153 +
  154 + gb_dataset = gdal.Open(gb_path, GA_ReadOnly)
  155 + gb_path_extracted=op.join(self.path_tmp, "green_band_extracted.tif")
  156 + if gb_dataset.RasterCount > 1:
  157 + print "extracting green band"
  158 + call_subprocess(["gdal_translate", "-of","GTiff","-ot","Int16","-a_nodata", str(self.nodata),"-b",str(gb_no),gb_path,gb_path_extracted])
  159 + else:
  160 + copyfile(gb_path, gb_path_extracted)
  161 +
  162 + red_band=inputs["red_band"]
  163 + rb_path=red_band["path"]
  164 + rb_no=red_band["noBand"]
  165 +
  166 + rb_dataset = gdal.Open(rb_path, GA_ReadOnly)
  167 + rb_path_extracted=op.join(self.path_tmp, "red_band_extracted.tif")
  168 + if rb_dataset.RasterCount > 1:
  169 + print "extracting red band"
  170 + call_subprocess(["gdal_translate", "-of","GTiff","-ot","Int16","-a_nodata", str(self.nodata),"-b",str(rb_no),rb_path,rb_path_extracted])
  171 + else:
  172 + copyfile(rb_path, rb_path_extracted)
  173 +
  174 + swir_band=inputs["swir_band"]
  175 + sb_path=swir_band["path"]
  176 + sb_no=swir_band["noBand"]
  177 +
  178 + sb_dataset = gdal.Open(sb_path, GA_ReadOnly)
  179 + sb_path_extracted=op.join(self.path_tmp, "swir_band_extracted.tif")
  180 + if sb_dataset.RasterCount > 1:
  181 + print "extracting swir band"
  182 + call_subprocess(["gdal_translate", "-of","GTiff","-ot","Int16","-a_nodata", str(self.nodata),"-b",str(sb_no),sb_path,sb_path_extracted])
  183 +
  184 + else:
  185 + copyfile(sb_path, sb_path_extracted)
  186 +
  187 + #check for same res
  188 + gb_dataset = gdal.Open(gb_path_extracted, GA_ReadOnly)
  189 + rb_dataset = gdal.Open(rb_path_extracted, GA_ReadOnly)
  190 + sb_dataset = gdal.Open(sb_path_extracted, GA_ReadOnly)
  191 +
  192 + gb_resolution = gb_dataset.GetGeoTransform()[1]
  193 + rb_resolution = rb_dataset.GetGeoTransform()[1]
  194 + sb_resolution = sb_dataset.GetGeoTransform()[1]
  195 + print "green band resolution : " + str(gb_resolution)
  196 + print "red band resolution : " + str(rb_resolution)
  197 + print "swir band resolution : " + str(sb_resolution)
  198 + #test if different reso
  199 + gb_path_resampled=op.join(self.path_tmp, "green_band_resampled.tif")
  200 + rb_path_resampled=op.join(self.path_tmp, "red_band_resampled.tif")
  201 + sb_path_resampled=op.join(self.path_tmp, "swir_band_resampled.tif")
  202 + if not gb_resolution == rb_resolution == sb_resolution:
  203 + print "resolution is different among band files"
  204 + #gdalwarp to max reso
  205 + max_res = max(gb_resolution, rb_resolution, sb_resolution)
  206 + print "cubic resampling to resolution " + str(max_res)
  207 + call_subprocess(["gdalwarp", "-overwrite","-r","cubic","-tr", str(max_res),str(max_res),gb_path_extracted,gb_path_resampled])
  208 + call_subprocess(["gdalwarp", "-overwrite","-r","cubic","-tr", str(max_res),str(max_res),rb_path_extracted,rb_path_resampled])
  209 + call_subprocess(["gdalwarp", "-overwrite","-r","cubic","-tr", str(max_res),str(max_res),sb_path_extracted,sb_path_resampled])
  210 + else:
  211 + gb_path_resampled=gb_path_extracted
  212 + rb_path_resampled=rb_path_extracted
  213 + sb_path_resampled=sb_path_extracted
  214 +
  215 + #build vrt
  216 + print "building bands vrt"
  217 + self.img=op.join(self.path_tmp, "lis.vrt")
  218 + call_subprocess(["gdalbuildvrt","-separate", self.img, sb_path_resampled, rb_path_resampled, gb_path_resampled])
  219 +
  220 + #Set bands parameters
  221 + self.nGreen=3
  222 + self.nRed=2
  223 + self.nSWIR=1
  224 +
144 225 #Parse snow parameters
145 226 snow=data["snow"]
146 227 self.dz=snow.get("dz")
147 228 self.ndsi_pass1=snow.get("ndsi_pass1")
148   - self.rRed_pass1=snow.get("rRed_pass1")
  229 + self.rRed_pass1=snow.get("red_pass1")
149 230 self.ndsi_pass2=snow.get("ndsi_pass2")
150   - self.rRed_pass2=snow.get("rRed_pass2")
  231 + self.rRed_pass2=snow.get("red_pass2")
151 232 self.fsnow_lim=snow.get("fsnow_lim")
152 233 self.fsnow_total_lim=snow.get("fsnow_total_lim")
153 234 #Build useful paths
... ... @@ -155,30 +236,25 @@ class snow_detector :
155 236 self.ndsi_pass1_path=op.join(self.path_tmp,"pass1.tif")
156 237 self.cloud_refine=op.join(self.path_tmp,"cloud_refine.tif")
157 238 self.nodata_path=op.join(self.path_tmp, "nodata_mask.tif")
158   - #Set bands parameters
159   - self.nGreen=0
160   - self.nSWIR=0
161   - self.nRed=0
162   - self.nodata=0
163   -
164   - if self.mode == "spot":
165   - self.nGreen=1 # Index of green band
166   - self.nSWIR=4 # Index of SWIR band (1 to 3 µm) = band 11 (1.6 µm) in S2
167   - self.nRed=2 # Index of red band
168   - self.nodata=-10000 # no-data value
169   - elif self.mode == "landsat":
170   - self.nGreen=3
171   - self.nSWIR=6
172   - self.nRed=4
173   - self.nodata=-10000
174   - elif self.mode == "s2":
175   - sentinel_2_preprocessing()
176   - #Set generic band index for Sentinel-2
177   - self.nGreen=1
178   - self.nRed=2
179   - self.nSWIR=3
180   - else:
181   - sys.exit("Supported modes are spot4,landsat and s2.")
  239 +
  240 + # if self.mode == "spot":
  241 + # self.nGreen=1 # Index of green band
  242 + # self.nSWIR=4 # Index of SWIR band (1 to 3 µm) = band 11 (1.6 µm) in S2
  243 + # self.nRed=2 # Index of red band
  244 + # self.nodata=-10000 # no-data value
  245 + # elif self.mode == "landsat":
  246 + # self.nGreen=3
  247 + # self.nSWIR=6
  248 + # self.nRed=4
  249 + # self.nodata=-10000
  250 + # elif self.mode == "s2":
  251 + # sentinel_2_preprocessing()
  252 + # #Set generic band index for Sentinel-2
  253 + # self.nGreen=1
  254 + # self.nRed=2
  255 + # self.nSWIR=3
  256 + # else:
  257 + # sys.exit("Supported modes are spot4,landsat and s2.")
182 258  
183 259 def detect_snow(self, nbPass):
184 260 #Set maximum ITK threads
... ... @@ -202,7 +278,7 @@ class snow_detector :
202 278 polygonize(op.join(self.path_tmp,"final_mask.tif"),op.join(self.path_tmp,"final_mask.tif"),op.join(self.path_tmp,"final_mask_vec.shp"))
203 279  
204 280 #RGB composition
205   - composition_RGB(self.img,op.join(self.path_tmp,"composition.tif"),self.nRed,self.nGreen,self.nSWIR)
  281 + composition_RGB(self.img,op.join(self.path_tmp,"composition.tif"))
206 282  
207 283 #Burn polygons edges on the composition
208 284 #TODO add pass1 snow polygon in yellow
... ... @@ -236,8 +312,13 @@ class snow_detector :
236 312 call_subprocess(["gdal_edit.py","-tr",str(geotransform[1]),str(geotransform[5]),op.join(self.path_tmp,"red_nn.tif")])
237 313  
238 314 #Extract shadow mask
239   - condition_shadow= "(im1b1>0 and im2b1>" + str(self.rRed_darkcloud) + ") or (im1b1 >= " + str(self.shadow_value) + ")"
240   - call_subprocess(["otbcli_BandMath","-il",self.cloud_init,op.join(self.path_tmp,"red_nn.tif"),"-out",self.cloud_refine+GDAL_OPT,"uint8","-ram",str(self.ram),"-exp",condition_shadow + "?1:0"])
  315 + call_subprocess(["compute_cloud_mask", self.cloud_init, str(self.all_cloud_mask), op.join(self.path_tmp,"all_cloud_mask.tif")])
  316 + call_subprocess(["compute_cloud_mask", self.cloud_init, str(self.shadow_mask), op.join(self.path_tmp,"shadow_mask.tif")])
  317 + call_subprocess(["compute_cloud_mask", self.cloud_init, str(self.high_cloud_mask), op.join(self.path_tmp,"high_cloud_mask.tif")])
  318 + cond_cloud2="im3b1>" + str(self.rRed_darkcloud)
  319 + condition_shadow= "((im1b1==1 and " + cond_cloud2 + ") or im2b1==1 or im4b1==1)"
  320 + print condition_shadow
  321 + call_subprocess(["otbcli_BandMath","-il",op.join(self.path_tmp,"all_cloud_mask.tif"), op.join(self.path_tmp,"shadow_mask.tif"),op.join(self.path_tmp,"red_nn.tif"), op.join(self.path_tmp,"high_cloud_mask.tif"),"-out",self.cloud_refine+GDAL_OPT,"uint8","-ram",str(self.ram),"-exp",condition_shadow])
241 322  
242 323 def pass1(self):
243 324 #Pass1 : NDSI threshold
... ... @@ -305,12 +386,6 @@ class snow_detector :
305 386  
306 387 call_subprocess(["otbcli_BandMath","-il",self.cloud_refine,generic_snow_path,self.cloud_init,self.redBand_path,"-out",op.join(self.path_tmp,"final_mask.tif")+GDAL_OPT_2B,"uint8","-ram",str(self.ram),"-exp",condition_final])
307 388 call_subprocess(["compute_snow_mask", op.join(self.path_tmp,"pass1.tif"), op.join(self.path_tmp,"pass2.tif"), op.join(self.path_tmp,"cloud_pass1.tif"), op.join(self.path_tmp,"cloud_refine.tif"), op.join(self.path_tmp, "snow_all.tif")])
308   -
309   - self.snow_percent = float(histo_utils_ext.compute_nb_pixels_between_bounds(generic_snow_path, 0, 255) * 100)/get_total_pixels_without_nodata(self.nodata_path)
310   - print self.snow_percent
311   -
312   - self.cloud_percent = float(histo_utils_ext.compute_nb_pixels_between_bounds(op.join(self.path_tmp,"cloud_refine.tif"), 0, 255) * 100)/get_total_pixels_without_nodata(self.nodata_path)
313   - print self.cloud_percent
314 389  
315 390 def pass3(self):
316 391 #Fuse pass1 and pass2 (use 255 not 1 here because of bad handling of 1 byte tiff by otb)
... ... @@ -357,12 +432,12 @@ class snow_detector :
357 432 #Extract green bands and resample to 20 meters
358 433 #FIXME Use multi resolution pyramid application or new resampling filter fontribute by J. Michel hear
359 434 call_subprocess(["gdal_translate", "-a_nodata", str(self.nodata),"-ot","Int16","-b",str(nGreen),s2_r1_img_path[0],greenBand_path])
360   - call_subprocess(["gdalwarp","-r","cubicspline","-tr","20","-20",greenBand_path,greenBand_resample_path])
  435 + call_subprocess(["gdalwarp","-r","cubic","-tr","20","-20",greenBand_path,greenBand_resample_path])
361 436  
362 437 #Extract red bands and sample to 20 meters
363 438 #FIXME Use multi resolution pyramid application or new resampling filter fontribute by J. Michel hear
364 439 call_subprocess(["gdal_translate", "-a_nodata", str(self.nodata),"-ot","Int16","-b",str(nRed),s2_r1_img_path[0],redBand_path])
365   - call_subprocess(["gdalwarp","-r","cubicspline","-tr","20","-20",redBand_path,redBand_resample_path])
  440 + call_subprocess(["gdalwarp","-r","cubic","-tr","20","-20",redBand_path,redBand_resample_path])
366 441  
367 442 #Extract SWIR
368 443 call_subprocess(["gdal_translate", "-a_nodata", str(self.nodata),"-ot","Int16","-b",str(nSWIR),s2_r2_img_path[0],swirBand_path])
... ... @@ -374,3 +449,6 @@ class snow_detector :
374 449 #img variable is used later to compute snow mask
375 450 self.img=concat_s2