Commit 4a83a21d859c6703d3879d9595ca7e520d4e0d36

Authored by Grizonnet Manuel (CNES)
2 parents 94cbcba2 0202ed83
Exists in develop and in 1 other branch master

Merge branch 'release/1.3'

Showing 61 changed files with 2713 additions and 2181 deletions   Show diff stats
CHANGELOG.md
1 1 # Change Log
2 2 All notable changes to LIS will be documented in this file.
3 3  
  4 +## [Unreleased]
  5 +
  6 +### Added
  7 +- Use gdal_trace_outline from the gina-alaska package instead of gdal_polygonize if available
  8 +
  9 +### Changed
  10 +- Move to OTB minimum 6.0.0, which includes a fix to properly handle 1 byte TIFF images
  11 +- Restore and document correct behaviour for LIS installation with install targets (lib, bin, include, python)
  12 +- New QGIS style files for raster and vector LIS product
  13 +- Use OTB Application Python API instead of call to subprocess
  14 +- Use Python Logging module for Python scripts instead of using print
  15 +- Changed compute_cloud_mask and compute_snow_mask by OTB applications
  16 +- Added a new app to generate the JSON configuration file (build_json.py)
  17 +- Changed the way the product is generated to avoid data duplication
  18 +- Change rasterize step to contour detection using 8 connectivity to generate the rgb composition
  19 +- Improved detection by adjusting default parameter red_pass2 from 0.120 to 0.040
  20 +- Improve code quality (pep8 and pylint)
  21 +- Improve installation instructions in the README.md
  22 +- Fix cpu usage to respect the "nb_threads" parameter set in the json file.
  23 +- The output product now uses the input product directory name as PRODUCT_ID in the XML file.
  24 +
  25 +## [1.2.1] - 2017-09-14
  26 +- Fix segfault in case number of histogram bins for the altitude channel is zero
  27 +
4 28 ## [1.2] - 2017-06-04
5 29 - add json schema to ATBD to document all parameters
6 30 - Add version of lis in atbd
7 31 - Document how to build the documentation in doc/tex directory
8 32 - Compact histogram files and copy it in LIS_PRODUCTS
9   -- Apply autopep8 to all Python scripts to imprve code quality
  33 +- Apply autopep8 to all Python scripts to improve code quality
10 34 - Add a changelog
  35 +
11 36 ## [1.1.1] - 2016-11-28
12 37 - minor update in build scripts
13 38 - change ctest launcher location
  39 +
14 40 ## [1.1.0] - 2016-11-28
15 41 - Change license from GPL to A-GPL
16 42 - Improvments in cmake configuration
17 43 - launch tests in separate directories
  44 +
18 45 ## [1.0.0] - 2016-07-06
19 46 - First released version of LIS with support with last MUSCATE format
20 47 - Support for image splitted in multiple files
... ...
CMakeLists.txt
... ... @@ -31,7 +31,8 @@ if (GDAL_CONFIG)
31 31 endif ()
32 32 endif()
33 33  
34   -#Set python home (needed on the cnes cluster)
  34 +#Set python home (needed on the cnes hpc)
  35 +# FIXME Is it still needed on hal?
35 36 set(PYTHON_INCLUDE_DIRS $ENV{PYTHONHOME}/include)
36 37  
37 38 find_package(PythonInterp REQUIRED)
... ... @@ -40,22 +41,24 @@ find_package( PythonLibs 2.7 REQUIRED)
40 41 include_directories( ${PYTHON_INCLUDE_DIRS} )
41 42  
42 43 find_package( Boost COMPONENTS python REQUIRED )
43   -
44 44 include_directories( ${Boost_INCLUDE_DIR} )
45 45  
46 46 # Link to the Orfeo ToolBox
47   -FIND_PACKAGE(OTB)
48   -IF(OTB_FOUND)
49   - INCLUDE(${OTB_USE_FILE})
50   -ELSE(OTB_FOUND)
51   - MESSAGE(FATAL_ERROR
  47 +# LIS requires OTB 6.0, which provides a patch regarding the management of 1 byte TIFF images
  48 +SET(OTB_MIN_VERSION "6.0.0")
  49 +
  50 +find_package(OTB ${OTB_MIN_VERSION} REQUIRED)
  51 +if(OTB_FOUND)
  52 + include(${OTB_USE_FILE})
  53 +else(OTB_FOUND)
  54 + message(FATAL_ERROR
52 55 "OTB not found. Please set OTB_DIR")
53   -ENDIF(OTB_FOUND)
  56 +endif(OTB_FOUND)
54 57  
55 58  
56 59 # Output directories
57 60 set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY ${lis_BINARY_DIR}/bin CACHE INTERNAL "Single output directory for all ARCHIVE products (static libs, import libs)")
58   -set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${lis_BINARY_DIR}/bin CACHE INTERNAL "Single output directory for all LIBRARY products (so, modules)")
  61 +set(CMAKE_LIBRARY_OUTPUT_DIRECTORY ${lis_BINARY_DIR}/lib CACHE INTERNAL "Single output directory for all LIBRARY products (so, modules)")
59 62 set(CMAKE_RUNTIME_OUTPUT_DIRECTORY ${lis_BINARY_DIR}/bin CACHE INTERNAL "Single output directory for all RUNTIME products (executables, dlls)")
60 63  
61 64 # Install dirs
... ... @@ -71,7 +74,7 @@ if(NOT lis_INSTALL_INCLUDE_DIR)
71 74 set(lis_INSTALL_INCLUDE_DIR "include")
72 75 endif(NOT lis_INSTALL_INCLUDE_DIR)
73 76  
74   -SET(BUILD_SHARED_LIBS ON)
  77 +set(BUILD_SHARED_LIBS ON)
75 78  
76 79 add_subdirectory(src)
77 80 add_subdirectory(python)
... ...
README.md
... ... @@ -26,7 +26,7 @@ All the parameters of the algorithm, paths to input and output data are stored i
26 26  
27 27 Moreover The JSON schema is available in the [Algorithm theoritical basis documentation](doc/tex/ATBD_CES-Neige.tex) and gives more information about the roles of these parameters.
28 28  
29   -NB: To build DEM data download the SRTM files corresponding to the study area and build the .vrt using gdalbuildvrt. Edit config.json file to activate preprocessing : Set "preprocessing" to true and set the vrt path.
  29 +NB: To build DEM data download the SRTM files corresponding to the study area and build the .vrt using gdalbuildvrt. Edit config.json file to activate preprocessing : Set "preprocessing" to true and set the vrt path.
30 30  
31 31  
32 32 ## Products format
... ... @@ -35,14 +35,14 @@ NB: To build DEM data download the SRTM files corresponding to the study area an
35 35 * SNOW_ALL: Binary mask of snow and clouds.
36 36 * 1st bit: Snow mask after pass1
37 37 * 2nd bit: Snow mask after pass2
38   - * 3rd bit: Clouds detected at pass0
  38 + * 3rd bit: Clouds detected at pass0
39 39 * 4th bit: Clouds refined at pass0
40 40  
41   -For example if you want to get the snow from pass1 and clouds detected from pass1 you need to do:
  41 +For example if you want to get the snow from pass1 and clouds detected from pass1 you need to do:
42 42 ```python
43   -pixel_value & 00000101
  43 +pixel_value & 00000101
44 44 ```
45   -* SEB: Raster image of the snow mask and cloud mask.
  45 +* SEB: Raster image of the snow mask and cloud mask.
46 46 * 0: No-snow
47 47 * 100: Snow
48 48 * 205: Cloud including cloud shadow
... ... @@ -71,10 +71,10 @@ Code to generate the snow cover extent product on Theia platform.
71 71  
72 72 ### Dependencies
73 73  
74   -lis dependencies:
  74 +lis dependencies:
75 75  
76 76 GDAL >=2.0
77   -OTB >= 5.0
  77 +OTB >= 6.0
78 78 Boost-Python
79 79 Python interpreter >= 2.7
80 80 Python libs >= 2.7
... ... @@ -110,52 +110,26 @@ Run make in your build folder.
110 110 ```bash
111 111 make
112 112 ```
113   -To install s2snow python module.
  113 +To install let-it-snow application and the s2snow python module.
114 114 In your build folder:
115 115 ```bash
116   -cd python
117   -python setup.py install
  116 +make install
118 117 ```
119   -or
120   -```bash
121   -python setup.py install --user
122   -```
123   -Update environment variables for LIS. Make sure that OTB and other dependencies directories are set in your environment variables:
124   -```bash
125   -export PYTHONPATH=/your/build/directory/bin/:$PYTHONPATH
126   -export PATH=/your/build/directory/bin:$PATH
127   -```
128   -let-it-snow is now installed.
129   -
130   -#### On venuscalc
131 118  
132   -To configure OTB 5.2.1:
133   -
134   -Create a bash file which contains:
  119 +Add appropriate executable rights
135 120 ```bash
136   -source /mnt/data/home/grizonnetm/config_otb_5.6.sh
  121 +chmod -R 755 ${install_dir}
137 122 ```
138 123  
139   -Then build the lis project using cmake
140   -```bash
141   -cd $build_dir
142   -cmake -DCMAKE_CXX_FLAGS:STRING=-std=c++11 -DCMAKE_CXX_COMPILER:FILEPATH=/usr/bin/g++-4.8 -DCMAKE_C_COMPILER:FILEPATH=/usr/bin/gcc-4.8 -DCMAKE_BUILD_TYPE=Release -DBUILD_TESTING=ON -DGDAL_INCLUDE_DIR=/mnt/data/home/grizonnetm/build/OTB-install/include -DGDAL_LIBRARY=/mnt/data/home/grizonnetm/build/OTB-install/lib/libgdal.so $source_dir
143   -make
144   -```
145   -To install s2snow python module.
146   -In your build folder:
147   -```bash
148   -cd python
149   -python setup.py install
150   -```
151   -or
152   -```bash
153   -python setup.py install --user
154   -```
  124 +The files will be installed by default into /usr/local and added to the default Python modules.
  125 +To override this behavior, the variable CMAKE_INSTALL_PREFIX must be configured before the build step.
  126 +
155 127 Update environment variables for LIS. Make sure that OTB and other dependencies directories are set in your environment variables:
156 128 ```bash
157   -export PYTHONPATH=/your/build/directory/bin/:$PYTHONPATH
158   -export PATH=/your/build/directory/bin:$PATH
  129 +export PATH=/your/install/directory/bin:/your/install/directory/app:$PATH
  130 +export LD_LIBRARY_PATH=/your/install/directory/lib:$LD_LIBRARY_PATH
  131 +export OTB_APPLICATION_PATH=/your/install/directory/lib:$OTB_APPLICATION_PATH
  132 +export PYTHONPATH=/your/install/directory/lib:/your/install/directory/lib/python2.7/site-packages:$PYTHONPATH
159 133 ```
160 134 let-it-snow is now installed.
161 135  
... ... @@ -165,8 +139,8 @@ Enable tests with BUILD_TESTING cmake option. Use ctest to run tests. Do not for
165 139  
166 140 Data (input and baseline) to run validation tests are available on [Zenodo](http://doi.org/10.5281/zenodo.166511).
167 141  
168   -Download LIS-Data and extract the folder. It contains all the data needed to run tests. Set Data-LIS path var in cmake configuration files.
169   -Baseline : Baseline data folder. It contains output files of S2Snow that have been reviewed and validated.
  142 +Download LIS-Data and extract the folder. It contains all the data needed to run tests. Set Data-LIS path var in cmake configuration files.
  143 +Baseline : Baseline data folder. It contains output files of S2Snow that have been reviewed and validated.
170 144 Data-Test : Test data folder needed to run tests. It contains Landsat, Take5 and SRTM data.
171 145 Output-Test : Temporary output tests folder.
172 146 Do not modify these folders.
... ...
app/CMakeLists.txt
1   -file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/run_snow_detector.py DESTINATION ${CMAKE_BINARY_DIR}/app)
2   -file(COPY ${CMAKE_CURRENT_SOURCE_DIR}/run_cloud_removal.py DESTINATION ${CMAKE_BINARY_DIR}/app)
3 1 \ No newline at end of file
  2 +file(INSTALL ${CMAKE_CURRENT_SOURCE_DIR}/run_snow_detector.py DESTINATION ${CMAKE_BINARY_DIR}/app)
  3 +file(INSTALL ${CMAKE_CURRENT_SOURCE_DIR}/run_cloud_removal.py DESTINATION ${CMAKE_BINARY_DIR}/app)
  4 +file(INSTALL ${CMAKE_CURRENT_SOURCE_DIR}/build_json.py DESTINATION ${CMAKE_BINARY_DIR}/app)
  5 +
  6 +install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/run_snow_detector.py DESTINATION ${CMAKE_INSTALL_PREFIX}/app)
  7 +install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/run_cloud_removal.py DESTINATION ${CMAKE_INSTALL_PREFIX}/app)
  8 +install(FILES ${CMAKE_CURRENT_SOURCE_DIR}/build_json.py DESTINATION ${CMAKE_INSTALL_PREFIX}/app)
... ...
app/build_json.py
1   -#!/usr/bin/env python
2   -
  1 +#!/usr/bin/python
  2 +import os
  3 +import re
3 4 import sys
4   -import os.path as op
5 5 import json
  6 +import logging
6 7 import argparse
7 8  
8   -def show_help():
9   - """Show help of the build_json script for theia N2A products"""
10   - print "This script is used to build json configuration file use then to compute snow mask using OTB applications on Spot/LandSat/Sentinel-2 products from theia platform"
11   - print "Usage: python build_theia_json -s [landsat|s2|take5] -d image_directory -e srtm_tile -o file.json"
12   - print "python run_snow_detector.py help to show help"
  9 +### Configuration Template ###
  10 +conf_template = {"general":{"pout":"",
  11 + "nodata":-10000,
  12 + "ram":1024,
  13 + "nb_threads":1,
  14 + "generate_vector":False,
  15 + "preprocessing":False,
  16 + "log":True,
  17 + "multi":1},
  18 + "inputs":{"green_band":{"path": "",
  19 + "noBand": 1},
  20 + "red_band":{"path": "",
  21 + "noBand": 1},
  22 + "swir_band":{"path": "",
  23 + "noBand": 1},
  24 + "dem":"",
  25 + "cloud_mask":""},
  26 + "snow":{"dz":100,
  27 + "ndsi_pass1":0.4,
  28 + "red_pass1":200,
  29 + "ndsi_pass2":0.15,
  30 + "red_pass2":40,
  31 + "fsnow_lim":0.1,
  32 + "fsnow_total_lim":0.001},
  33 + "cloud":{"shadow_in_mask":64,
  34 + "shadow_out_mask":128,
  35 + "all_cloud_mask":1,
  36 + "high_cloud_mask":32,
  37 + "rf":12,
  38 + "red_darkcloud":500,
  39 + "red_backtocloud":100}}
  40 +
  41 +### Mission Specific Parameters ###
  42 +S2_parameters = {"multi":10,
  43 + "green_band":".*FRE_B3.*\.tif$",
  44 + "green_bandNumber":1,
  45 + "red_band":".*FRE_B4.*\.tif$",
  46 + "red_bandNumber":1,
  47 + "swir_band":".*FRE_B11.*\.tif$",
  48 + "swir_bandNumber":1,
  49 + "cloud_mask":".*CLM_R2\.tif$",
  50 + "dem":".*ALT_R2\.TIF$",
  51 + "shadow_in_mask":32,
  52 + "shadow_out_mask":64,
  53 + "all_cloud_mask":1,
  54 + "high_cloud_mask":128,
  55 + "rf":12}
  56 +
  57 +Take5_parameters = {"multi":1,
  58 + "green_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
  59 + "green_bandNumber":1,
  60 + "red_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
  61 + "red_bandNumber":2,
  62 + "swir_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
  63 + "swir_bandNumber":4,
  64 + "cloud_mask":".*NUA.*\.TIF$",
  65 + "dem":".*\.tif",
  66 + "shadow_in_mask":64,
  67 + "shadow_out_mask":128,
  68 + "all_cloud_mask":1,
  69 + "high_cloud_mask":32,
  70 + "rf":8}
  71 +
  72 +L8_parameters = {"multi":1,
  73 + "green_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
  74 + "green_bandNumber":3,
  75 + "red_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
  76 + "red_bandNumber":4,
  77 + "swir_band":".*ORTHO_SURF_CORR_PENTE.*\.TIF$",
  78 + "swir_bandNumber":6,
  79 + "cloud_mask":".*NUA.*\.TIF$",
  80 + "dem":".*\.tif",
  81 + "shadow_in_mask":64,
  82 + "shadow_out_mask":128,
  83 + "all_cloud_mask":1,
  84 + "high_cloud_mask":32,
  85 + "rf":8}
  86 +
  87 +mission_parameters = {"S2":S2_parameters,\
  88 + "LANDSAT8":L8_parameters,\
  89 + "Take5":Take5_parameters}
  90 +
  91 +def findFiles(folder, pattern):
  92 + """ Search recursively into a folder to find a pattern match
  93 + """
  94 + matches = []
  95 + for root, dirs, files in os.walk(folder):
  96 + for file in files:
  97 + if re.match(pattern, file):
  98 + matches.append(os.path.join(root, file))
  99 + return matches
13 100  
  101 +def read_product(inputPath, mission):
  102 + """ Read the content of the input product folder
  103 + and load the data information required for snow detection.
  104 + """
  105 + if os.path.exists(inputPath):
  106 + params = mission_parameters[mission]
  107 + conf_json = conf_template
14 108  
15   -#----------------- MAIN ---------------------------------------------------
  109 + conf_json["general"]["multi"] = params["multi"]
  110 +
  111 + conf_json["inputs"]["green_band"]["path"] = findFiles(inputPath, params["green_band"])[0]
  112 + conf_json["inputs"]["red_band"]["path"] = findFiles(inputPath, params["red_band"])[0]
  113 + conf_json["inputs"]["swir_band"]["path"] = findFiles(inputPath, params["swir_band"])[0]
  114 + conf_json["inputs"]["green_band"]["noBand"] = params["green_bandNumber"]
  115 + conf_json["inputs"]["red_band"]["noBand"] = params["red_bandNumber"]
  116 + conf_json["inputs"]["swir_band"]["noBand"] = params["swir_bandNumber"]
  117 + conf_json["inputs"]["cloud_mask"] = findFiles(inputPath, params["cloud_mask"])[0]
  118 + result = findFiles(os.path.join(inputPath, "SRTM"), params["dem"])
  119 + if result:
  120 + conf_json["inputs"]["dem"] = result[0]
  121 + else:
  122 + logging.warning("No DEM found!")
  123 +
  124 + conf_json["cloud"]["shadow_in_mask"] = params["shadow_in_mask"]
  125 + conf_json["cloud"]["shadow_out_mask"] = params["shadow_out_mask"]
  126 + conf_json["cloud"]["all_cloud_mask"] = params["all_cloud_mask"]
  127 + conf_json["cloud"]["high_cloud_mask"] = params["high_cloud_mask"]
  128 + conf_json["cloud"]["rf"] = params["rf"]
  129 +
  130 + return conf_json
  131 + else:
  132 + logging.error(inputPath + " doesn't exist.")
16 133  
17 134 def main():
18   - """ Script to build json from theia N2A product"""
  135 + # Parse arguments
  136 + parser = argparse.ArgumentParser(description='This script is used to \
  137 + generate the snow detector configuration json file.\
  138 + This configuration requires at least the input product path\
  139 + and the output path in which will be generated snow product.')
  140 +
  141 + parser.add_argument("inputPath", help="input product path \
  142 + (supports S2/L8/Take5 products)")
  143 + parser.add_argument("outputPath", help="output folder for the json configuration file, \
  144 + and also the configured output path for the snow product")
  145 +
  146 + group_general = parser.add_argument_group('general', 'general parameters')
  147 + group_general.add_argument("-nodata", type=int)
  148 + group_general.add_argument("-ram", type=int)
  149 + group_general.add_argument("-nb_threads", type=int)
  150 + #group_general.add_argument("-generate_vector", type=bool)
  151 + #group_general.add_argument("-preprocessing", type=bool)
  152 + #group_general.add_argument("-log", type=bool)
  153 + group_general.add_argument("-multi", type=float)
  154 +
19 155  
20   - parser = argparse.ArgumentParser(description='Build json from THEIA product')
  156 + group_snow = parser.add_argument_group('inputs', 'input files')
  157 + group_general.add_argument("-dem", help="dem file path, to use for processing the input product")
21 158  
22   - parser.add_argument("-s", help="select input sensors")
23   - parser.add_argument("-d", help="input dir")
24   - parser.add_argument("-o", help="input dir")
25   - parser.add_argument("-do", help="input dir")
  159 + group_snow = parser.add_argument_group('snow', 'snow parameters')
  160 + group_snow.add_argument("-dz", type=int)
  161 + group_snow.add_argument("-ndsi_pass1", type=float)
  162 + group_snow.add_argument("-red_pass1", type=float)
  163 + group_snow.add_argument("-ndsi_pass2", type=float)
  164 + group_snow.add_argument("-red_pass2", type=float)
  165 + group_snow.add_argument("-fsnow_lim", type=float)
  166 + group_snow.add_argument("-fsnow_total_lim", type=float)
  167 +
  168 + group_cloud = parser.add_argument_group('cloud', 'cloud parameters')
  169 + group_cloud.add_argument("-shadow_in_mask", type=int)
  170 + group_cloud.add_argument("-shadow_out_mask", type=int)
  171 + group_cloud.add_argument("-all_cloud_mask", type=int)
  172 + group_cloud.add_argument("-high_cloud_mask", type=int)
  173 + group_cloud.add_argument("-rf", type=int)
  174 + group_cloud.add_argument("-red_darkcloud", type=int)
  175 + group_cloud.add_argument("-red_backtocloud", type=int)
26 176  
27 177 args = parser.parse_args()
28   -
29   - #print(args.accumulate(args.integers))
30   -
31   - #Parse sensor
32   - if (args.s == 's2'):
33   - multi=10
34   - #Build json file
35   - data = {}
36   -
37   - data["general"]={
38   - "pout":args.do,
39   - "nodata":-10000,
40   - "ram":1024,
41   - "nb_threads":1,
42   - "generate_vector":"false",
43   - "preprocessing":"false",
44   - "log":"true",
45   - "multi":10
46   - }
47   -
48   - data["cloud"]={
49   - "shadow_mask":32,
50   - "all_cloud_mask":1,
51   - "high_cloud_mask":128,
52   - "rf":12,
53   - "red_darkcloud":500,
54   - "red_backtocloud":100
55   - }
56   - data["snow"]={
57   - "dz":100,
58   - "ndsi_pass1":0.4,
59   - "red_pass1":200,
60   - "ndsi_pass2":0.15,
61   - "red_pass2":120,
62   - "fsnow_lim":0.1,
63   - "fsnow_total_lim":0.001
64   - }
65   -
66   - fp = open(args.o, 'w')
67   - fp.write(json.dumps(data,indent=4, sort_keys=True))
68   - fp.close()
69   -
  178 +
  179 + inputPath = os.path.abspath(args.inputPath)
  180 + outputPath = os.path.abspath(args.outputPath)
  181 +
  182 + if ("S2" in inputPath) or ("SENTINEL2" in inputPath):
  183 + jsonData = read_product(inputPath, "S2")
  184 + elif "Take5" in inputPath:
  185 + jsonData = read_product(inputPath, "Take5")
  186 + elif "LANDSAT8" in inputPath:
  187 + jsonData = read_product(inputPath, "LANDSAT8")
  188 + else:
  189 + logging.error("Unknown product type.")
  190 +
  191 + if jsonData:
  192 + if not os.path.exists(outputPath):
  193 + os.makedirs(outputPath)
  194 +
  195 + jsonData["general"]["pout"] = outputPath
  196 +
  197 + # Override parameters for group general
  198 + if args.nodata:
  199 + jsonData["general"]["nodata"] = args.nodata
  200 + if args.ram:
  201 + jsonData["general"]["ram"] = args.ram
  202 + if args.nb_threads:
  203 + jsonData["general"]["nb_threads"] = args.nb_threads
  204 + if args.multi:
  205 + jsonData["general"]["multi"] = args.multi
  206 +
  207 + # Override dem location
  208 + if args.dem:
  209 + jsonData["inputs"]["dem"] = os.path.abspath(args.dem)
  210 +
  211 + # Override parameters for group snow
  212 + if args.dz:
  213 + jsonData["snow"]["dz"] = args.dz
  214 + if args.ndsi_pass1:
  215 + jsonData["snow"]["ndsi_pass1"] = args.ndsi_pass1
  216 + if args.red_pass1:
  217 + jsonData["snow"]["red_pass1"] = args.red_pass1
  218 + if args.ndsi_pass2:
  219 + jsonData["snow"]["ndsi_pass2"] = args.ndsi_pass2
  220 + if args.red_pass2:
  221 + jsonData["snow"]["red_pass2"] = args.red_pass2
  222 + if args.fsnow_lim:
  223 + jsonData["snow"]["fsnow_lim"] = args.fsnow_lim
  224 + if args.fsnow_total_lim:
  225 + jsonData["snow"]["fsnow_total_lim"] = args.fsnow_total_lim
  226 +
  227 + # Override parameters for group cloud
  228 + if args.shadow_in_mask:
  229 + jsonData["cloud"]["shadow_in_mask"] = args.shadow_in_mask
  230 + if args.shadow_out_mask:
  231 + jsonData["cloud"]["shadow_out_mask"] = args.shadow_out_mask
  232 + if args.all_cloud_mask:
  233 + jsonData["cloud"]["all_cloud_mask"] = args.all_cloud_mask
  234 + if args.high_cloud_mask:
  235 + jsonData["cloud"]["high_cloud_mask"] = args.high_cloud_mask
  236 + if args.rf:
  237 + jsonData["cloud"]["rf"] = args.rf
  238 + if args.red_darkcloud:
  239 + jsonData["cloud"]["red_darkcloud"] = args.red_darkcloud
  240 + if args.red_backtocloud:
  241 + jsonData["cloud"]["red_backtocloud"] = args.red_backtocloud
  242 +
  243 + jsonFile = open(os.path.join(outputPath, "param_test.json"), "w")
  244 + jsonFile.write(json.dumps(jsonData, indent=4))
  245 + jsonFile.close()
  246 +
70 247 if __name__ == "__main__":
  248 + # Set logging level and format.
  249 + logging.basicConfig(stream=sys.stdout, level=logging.INFO, format=\
  250 + '%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
71 251 main()
... ...
app/run_snow_detector.py
... ... @@ -3,9 +3,10 @@
3 3 import sys
4 4 import os.path as op
5 5 import json
  6 +import logging
6 7 from s2snow import snow_detector
7 8  
8   -VERSION = "1.2"
  9 +VERSION = "1.3"
9 10  
10 11  
11 12 def show_help():
... ... @@ -39,6 +40,12 @@ def main(argv):
39 40 sys.stdout = open(op.join(pout, "stdout.log"), 'w')
40 41 sys.stderr = open(op.join(pout, "stderr.log"), 'w')
41 42  
  43 + # Set logging level and format.
  44 + logging.basicConfig(stream=sys.stdout, level=logging.INFO, format='%(asctime)s - %(filename)s:%(lineno)s - %(levelname)s - %(message)s')
  45 + logging.info("Start run_snow_detector.py")
  46 + logging.info("Input args = " + json_file)
  47 +
  48 + # Run the snow detector
42 49 sd = snow_detector.snow_detector(data)
43 50 sd.detect_snow(2)
44 51  
... ...
config/param_cloudremoval_template.json
... ... @@ -1,23 +0,0 @@
1   -{
2   - "general":{
3   - "pout":"outputdir",
4   - "ram":512,
5   - "nbThreads":1,
6   - "stats":false
7   - },
8   - "inputs":{
9   - "m2Path":"m2path",
10   - "m1Path":"m1path",
11   - "t0Path":"t0path",
12   - "p1Path":"p1path",
13   - "p2Path":"p2path",
14   - "demPath":"dempath",
15   - "refPath":"refpath",
16   - },
17   - "steps":{
18   - "s1":true,
19   - "s2":true,
20   - "s3":true,
21   - "s4":true
22   - }
23   -}
config/param_full_Landsat8_template.json
... ... @@ -1,45 +0,0 @@
1   -{
2   - "general":{
3   - "pout":"outputdir",
4   - "nodata":-10000,
5   - "ram":1024,
6   - "nb_threads":1,
7   - "generate_vector":false,
8   - "preprocessing":false,
9   - "log":true
10   - },
11   - "inputs":{
12   - "green_band": {
13   - "path": "inputimage",
14   - "noBand": 3
15   - },
16   - "red_band": {
17   - "path": "inputimage",
18   - "noBand": 4
19   - },
20   - "swir_band": {
21   - "path": "inputimage",
22   - "noBand": 6
23   - },
24   - "dem":"inputdem",
25   - "cloud_mask":"inputcloud"
26   - },
27   - "cloud":
28   - {
29   - "shadow_mask":64,
30   - "all_cloud_mask":0,
31   - "high_cloud_mask":64,
32   - "rf":8,
33   - "red_darkcloud":650,
34   - "red_backtocloud":100
35   - },
36   - "snow":{
37   - "dz":100,
38   - "ndsi_pass1":0.4,
39   - "red_pass1":200,
40   - "ndsi_pass2":0.15,
41   - "red_pass2":120,
42   - "fsnow_lim":0.1,
43   - "fsnow_total_lim":0.001
44   - }
45   -}
config/param_full_Take5_template.json
... ... @@ -1,33 +0,0 @@
1   -{
2   - "general":{
3   - "pout":"outputdir",
4   - "shadow_value":64,
5   - "ram":1024,
6   - "nbThreads":1,
7   - "mode":"spot",
8   - "generate_vector":false,
9   - "preprocessing":false,
10   - "postprocessing":false
11   - },
12   - "inputs":{
13   - "vrt":"inputvrt",
14   - "image":"inputimage",
15   - "dem":"inputdem",
16   - "cloud_mask":"inputcloud"
17   - },
18   - "cloud_mask":{
19   - "rf":8,
20   - "rRed_darkcloud":650,
21   - "rRed_backtocloud":100
22   - },
23   - "snow":{
24   - "dz":100,
25   - "ndsi_pass1":0.4,
26   - "rRed_pass1":200,
27   - "ndsi_pass2":0.15,
28   - "rRed_pass2":120,
29   - "fsnow_lim":0.1,
30   - "fsnow_total_lim":0.001
31   - }
32   -}
33   -
config/param_s2.json
... ... @@ -1,29 +0,0 @@
1   -{
2   - "general":{
3   - "pout":"/mnt/data/home/grizonnetm/temporary/s2",
4   - "shadow_value":32,
5   - "ram":1024,
6   - "mode":"s2",
7   - "generate_vector":false
8   - },
9   - "inputs":{
10   - "image":"/mnt/data/SENTINEL2/N2A_TEC/30TYN/S2A_OPER_SSC_L2VALD_30TYN____20150706/S2A_OPER_SSC_L2VALD_30TYN____20150706.DBL.DIR",
11   - "dem":"/mnt/data/home/grizonnetm/SRTM-S2/srtm_superimpose.tif",
12   - "cloud_mask":"/mnt/data/SENTINEL2/N2A_TEC/30TYN/S2A_OPER_SSC_L2VALD_30TYN____20150706/S2A_OPER_SSC_L2VALD_30TYN____20150706.DBL.DIR/S2A_OPER_SSC_PDTANX_L2VALD_30TYN____20150706_CLD_R2.DBL.TIF"
13   - },
14   - "cloud_mask":{
15   - "rf":8,
16   - "rRed_darkcloud":500,
17   - "rRed_backtocloud":100
18   - },
19   - "snow":{
20   - "dz":100,
21   - "ndsi_pass1":0.4,
22   - "rRed_pass1":200,
23   - "ndsi_pass2":0.15,
24   - "rRed_pass2":120,
25   - "fsnow_lim":0.1,
26   - "fsnow_total_lim":0.001
27   - }
28   -}
29   -
config/param_test_s2.json
... ... @@ -1,46 +0,0 @@
1   -{
2   - "general":{
3   - "pout":"/home/klempkat/let-it-snow/muscate/output",
4   - "nodata":-10000,
5   - "ram":1024,
6   - "nb_threads":1,
7   - "generate_vector":false,
8   - "preprocessing":false,
9   - "log":true
10   - },
11   - "inputs":{
12   - "green_band": {
13   - "path": "/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B3.tif",
14   - "noBand": 1
15   - },
16   - "red_band": {
17   - "path": "/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B4.tif",
18   - "noBand": 1
19   - },
20   - "swir_band": {
21   - "path": "/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_FRE_B11.tif",
22   - "noBand": 1
23   - },
24   - "dem":"/home/grizonnetm/Data-Neige/MUSCATE/MNT_S2_N2/S2__TEST_AUX_REFDE2_T29RNQ_0001_1.0/S2__TEST_AUX_REFDE2_T29RNQ_0001/S2__TEST_AUX_REFDE2_T29RNQ_0001.DBL.DIR/S2__TEST_AUX_REFDE2_T29RNQ_0001_ALT_R2.TIF",
25   - "cloud_mask":"/home/grizonnetm/Data-Neige/MUSCATE/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0/MASKS/SENTINEL2A_20160217-111843-605_L2A_T29RNQ_D_V1-0_CLM_R2.tif"
26   - },
27   - "cloud":
28   - {
29   - "shadow_mask":64,
30   - "all_cloud_mask":0,
31   - "high_cloud_mask":32,
32   - "rf":12,
33   - "red_darkcloud":650,
34   - "red_backtocloud":100
35   - },
36   - "snow":{
37   - "dz":100,
38   - "ndsi_pass1":0.4,
39   - "red_pass1":200,
40   - "ndsi_pass2":0.15,
41   - "red_pass2":120,
42   - "fsnow_lim":0.1,
43   - "fsnow_total_lim":0.001
44   - }
45   -}
46   -
doc/tex/ATBD_CES-Neige.pdf
No preview for this file type
doc/tex/ATBD_CES-Neige.tex
... ... @@ -333,7 +333,7 @@ Parameter & Description & Name in the configuration file & Default value\\
333 333 \textcolor{red}{$n_1$} & Minimum value of the NDSI for the pass 1 snow test & \texttt{ndsi\_pass1} & 0.400\\
334 334 \textcolor{red}{$n_2$} & Minimum value of the NDSI for the pass 2 snow test & \texttt{ndsi\_pass2} & 0.150\\
335 335 \textcolor{red}{$r_1$} & Minimum value of the red band reflectance the pass 1 snow test & \texttt{rRed\_pass1} & 0.200 \\
336   -\textcolor{red}{$r_1$} & Minimum value of the red band reflectance the pass 2 snow test & \texttt{rRed\_pass2} & 0.120 \
  336 +\textcolor{red}{$r_1$} & Minimum value of the red band reflectance the pass 2 snow test & \texttt{rRed\_pass2} & 0.040 \
337 337 \textcolor{red}{$d_z$} & Minimum snow fraction in an elevation band to define $z_s$ & \texttt{fsnow\_lim} & 0.100 \\
338 338 \textcolor{red}{$f_t$} & Minimum snow fraction in an elevation band to define $z_s$ & \texttt{fsnow\_lim} & 0.100 \\
339 339 \textcolor{red}{$f_s$} & Minimum snow fraction in the image to activate the pass 2 snow test & \texttt{fsnow\_total\_lim} & 0.001 \\
... ...
doc/tex/schema.json
... ... @@ -239,7 +239,7 @@
239 239 "type": "integer"
240 240 },
241 241 "red_pass2": {
242   - "default": 120,
  242 + "default": 40,
243 243 "description": "Minimum value of the red band reflectance the pass 2 snow test.",
244 244 "id": "red_pass2",
245 245 "title": "The Red_pass2 schema.",
... ...
hpc/LIS_SEB_style_OTB.txt 0 โ†’ 100644
... ... @@ -0,0 +1,10 @@
  1 +##Color table use to produce LIS SEB using OTB Color Mapping application
  2 +
  3 +# cyan: snow
  4 +100 0 255 255
  5 +# grey: no snow
  6 +0 119 119 119
  7 +# white: cloud
  8 +205 255 255 255
  9 +#black: no data
  10 +254 0 0 0
... ...
hpc/build_module.sh
... ... @@ -1,52 +0,0 @@
1   -#!/bin/bash
2   -
3   -export MODULEPATH=$MODULEPATH:/work/logiciels/modulefiles/
4   -
5   -module purge
6   -
7   -module load cmake/3.0.2
8   -module load gcc
9   -module load boost
10   -module load otb/5.6
11   -module load python/2.7.5
12   -
13   -pkg="lis"
14   -version="cloudremoval"
15   -name=$pkg-$version
16   -src=$DATACI/modules/repository/$name/lis
17   -install_dir=$DATACI/modules/repository/$name/$name-install
18   -log=$DATACI/modules/repository/$name/build.log
19   -data_root=$DATACI/modules/repository/$name/Data-LIS
20   -
21   -echo "Building $pkg version $version ..."
22   -
23   -# clean previous build
24   -rm -rf $install_dir/*
25   -rm $log
26   -
27   -mkdir -p $install_dir
28   -cd $install_dir
29   -
30   -#setup ENV for testing
31   -export PATH=$install_dir/bin:$PATH
32   -export LD_LIBRARY_PATH=$install_dir/bin:$LD_LIBRARY_PATH
33   -export PYTHONPATH=$install_dir/bin:$install_dir/bin/lib/python2.7/site-packages:$PYTHONPATH
34   -
35   -echo "Configuring ..."
36   -CC=$GCCHOME/bin/gcc CXX=$GCCHOME/bin/g++ cmake -DBUILD_TESTING=ON -DLIS_DATA_ROOT=$data_root -DCMAKE_BUILD_TYPE=Release -DCMAKE_C_COMPILER:STRING=$CC -DCMAKE_CXX_COMPILER:STRING=$CXX -DCMAKE_CXX_FLAGS="-std=c++11" -DPYTHON_LIBRARY=${PYTHONHOME}/lib -DPYTHON_INCLUDE_DIR=${PYTHONHOME}/include/python2.7 -DGDAL_INCLUDE_DIR=/work/logiciels/otb/5.2.1/include $src &>> $log
37   -
38   -echo "Building ..."
39   -make -j2 &>> $log
40   -
41   -cd $install_dir/python
42   -python setup.py install -f --prefix $install_dir/bin
43   -
44   -#add install_dir to qtis linux group and appropriate rights
45   -chmod -R 755 $install_dir
46   -chmod -R 755 $data_root
47   -
48   -echo "Launch LIS tests ..."
49   -cd $install_dir
50   -ctest -VV &>> $log
51   -
52   -echo "Done. Check $log for build details."
hpc/makefigureTile_lis_Sentinel2_cluster_muscate.sh 0 โ†’ 100644
... ... @@ -0,0 +1,73 @@
  1 +#!/bin/bash
  2 +#PBS -N TheiaNViz
  3 +#PBS -j oe
  4 +#PBS -l select=1:ncpus=4:mem=10gb
  5 +#PBS -l walltime=00:55:00
  6 +# make output figures for a better visualization
  7 +# qsub -v tile="29SRQ" makefigureTile_lis_Sentinel2_cluster.sh
  8 +
  9 +# IM was compiled with OpenMP in hal
  10 +MAGICK_THREAD_LIMIT=4 ; export MAGICK_THREAD_LIMIT
  11 +MAGICK_MAP_LIMIT=2000Mb
  12 +MAGICK_MEMORY_LIMIT=2000Mb
  13 +MAGICK_AREA_LIMIT=2000Mb
  14 +export MAGICK_MAP_LIMIT MAGICK_MEMORY_LIMIT MAGICK_AREA_LIMIT
  15 +ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS=4 ; export ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS
  16 +
  17 +# input folder: LIS products path
  18 +pin="/work/OT/siaa/Theia/Neige/output_muscate_v2/"
  19 +
  20 +# output folder: LIS figure path
  21 +pout="/work/OT/siaa/Theia/Neige/output_muscate_v2/figures/"
  22 +
  23 +# load otb
  24 +module load otb
  25 +
  26 +# Tile to process
  27 +if [ -z $tile ]; then
  28 + echo "tile is not set, using 31TCH"
  29 + tile=31TCH
  30 +fi
  31 +
  32 +# export colored mask in tif using otb
  33 +for img in $(find $pin/T$tile/ -name *SEB.TIF)
  34 +do
  35 + echo $img
  36 + tiledate=$(basename $(dirname $(dirname $img)))
  37 + lab=${tiledate:11:8}
  38 + y=${lab:0:4}
  39 + m=${lab:4:2}
  40 + d=${lab:6:2}
  41 + labd=$y-$m-$d
  42 + echo $labd
  43 + pout2=$pout/$tile/$tiledate/$(basename $img .TIF)
  44 + echo $pout2
  45 + mkdir -p $pout2
  46 + imgout=$pout2/$labd.tif
  47 + otbcli_ColorMapping -progress false -in $img -out $imgout uint8 -method.custom.lut /work/OT/siaa/Theia/hpc_scripts/LIS_SEB_style_OTB.txt
  48 +# gdaldem color-relief $img /work/OT/siaa/Theia/hpc_scripts/LIS_SEB_style_v2.txt $imgout -exact_color_entry
  49 +done
  50 +
  51 +# export compo in jpg
  52 +for img in $(find $pin/T$tile/ -name *COMPO.TIF)
  53 +do
  54 + echo $img
  55 + tiledate=$(basename $(dirname $(dirname $img)))
  56 + lab=${img:`expr index "$img" A`+1:8}
  57 + y=${lab:0:4}
  58 + m=${lab:4:2}
  59 + d=${lab:6:2}
  60 + labd=$y-$m-$d
  61 + echo $labd
  62 + pout2=$pout/$tile/$tiledate/$(basename $img .TIF)
  63 + echo $pout2
  64 + mkdir -p $pout2
  65 + imgout=$pout2/$labd.jpg
  66 + convert $img $imgout
  67 +done
  68 +
  69 +# make mask montage
  70 +montage -geometry 10%x10%+2+2 -label %t -title "$tile Sentinel-2A (cyan: snow, grey: no snow, white: cloud, black: no data)" -pointsize 40 $pout/$tile/*/LIS_SEB/*.tif $pout/montage_"$tile"_maskcol_onetenthresolution.png
  71 +
  72 +# make compo montage
  73 +montage -geometry 10%x10%+2+2 -label %t -title "$tile Sentinel-2A (SWIR false color composites)" -pointsize 40 $pout/$tile/*/LIS_COMPO/*.jpg $pout/montage_"$tile"_compo_onetenthresolution.png
... ...
hpc/old/run_cloud_removal_cluster.sh 0 โ†’ 100644
... ... @@ -0,0 +1,122 @@
  1 +#!/bin/bash
  2 +#Script launching LIS cloud removal on linux3-ci
  3 +#
  4 +#Please set up USER CONFIG for your system before launching this script
  5 +######################USER CONFIG####################################
  6 +#####################################################################
  7 +#lis app
  8 +lis_app=$HOME/lis/run_cloud_removal.py
  9 +#json template
  10 +lis_config=$HOME/lis/config/param_cloudremoval_template.json
  11 +#path where pbs script will be generated
  12 +lis_job_script_PBS=$HOME/lis/pbs/lis_job_cr.pbs
  13 +#path where config will be generated
  14 +lis_config_list=$HOME/lis/config/config_list_cr.conf
  15 +#pbs log
  16 +lis_log=$HOME/lis/log
  17 +#IO directories
  18 +data_input=$DATACI/test_cloudremoval/input
  19 +data_output=$DATACI/test_cloudremoval/output
  20 +#tiles to compute
  21 +tiles="N2A_EcrinsFranceD0000B0000"
  22 +stats=false
  23 +hsmin=2500
  24 +hsmax=3700
  25 +#####################################################################
  26 +
  27 +echo "Generating config list..."
  28 +rm $lis_config_list
  29 +tiles_nb=0
  30 +for tile in $tiles
  31 +do
  32 +pimg=$data_input/$tile
  33 +inputdem=$data_input/SRTM/$tile/$tile.tif
  34 +
  35 +imgarr=($pimg/*)
  36 +imgnb=$(find $pimg -mindepth 1 -maxdepth 1 -type d | wc -l)
  37 +slicemax=$(($imgnb-2))
  38 +
  39 +for i in `seq 2 $slicemax`
  40 +do
  41 + echo "$tile $inputdem ${imgarr[$i-2]} ${imgarr[$i-1]} ${imgarr[$i]} ${imgarr[$i+1]} ${imgarr[$i+2]}" >> $lis_config_list
  42 + ((tiles_nb++))
  43 +done
  44 +done
  45 +
  46 +echo "Done"
  47 +echo "Number of images to compute: $tiles_nb"
  48 +
  49 +echo "Generating pbs script..."
  50 +#Create pbs job script
  51 +cat <<EOF > $lis_job_script_PBS
  52 +#!/bin/bash
  53 +#PBS -N lis
  54 +#PBS -l select=1:ncpus=1
  55 +#PBS -l walltime=00:45:00
  56 +#PBS -o $lis_log
  57 +#PBS -e $lis_log
  58 +#PBS -J 1-${tiles_nb}:1
  59 +
  60 +tile=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f1)
  61 +dempath=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f2)
  62 +m2path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f3)
  63 +m1path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f4)
  64 +t0path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f5)
  65 +p1path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f6)
  66 +p2path=\$(sed -n \${PBS_ARRAY_INDEX}p $lis_config_list | cut -d ' ' -f7)
  67 +
  68 +#copy input data to tmp
  69 +#tmp directories
  70 +rm -r \$TMPCI/\$(basename \$t0path)_LIS_cr
  71 +data_tmp=\$TMPCI/\$(basename \$t0path)_LIS_cr
  72 +data_input_tmp=\$data_tmp/input
  73 +data_output_tmp=\$data_tmp/output
  74 +
  75 +mkdir -p \$data_input_tmp/\$tile/\$(basename \$t0path)
  76 +mkdir -p \$data_input_tmp/SRTM/\$tile
  77 +
  78 +cp -r \$t0path/* \$data_input_tmp/\$tile/\$(basename \$t0path)
  79 +cp \$dempath \$data_input_tmp/SRTM/\$tile/\$(basename \$dempath)
  80 +
  81 +t0path=\$data_input_tmp/\$tile/\$(basename \$t0path)
  82 +dempath=\$data_input_tmp/SRTM/\$tile/\$(basename \$dempath)
  83 +
  84 +#create json
  85 +config=\$t0path.json
  86 +cp $lis_config \$config
  87 +
  88 +#modify json
  89 +m2img=\$(find \$m2path -name *SEB.TIF)
  90 +m1img=\$(find \$m1path -name *SEB.TIF)
  91 +t0img=\$(find \$t0path -name *SEB.TIF)
  92 +p1img=\$(find \$p1path -name *SEB.TIF)
  93 +p2img=\$(find \$p2path -name *SEB.TIF)
  94 +pout=\$data_output_tmp/\$tile/\$(basename \$t0path)
  95 +mkdir -p \$pout
  96 +sed -i -e "s|outputdir|\$pout|g" \$config
  97 +sed -i -e "s|m2path|\$m2img|g" \$config
  98 +sed -i -e "s|m1path|\$m1img|g" \$config
  99 +sed -i -e "s|t0path|\$t0img|g" \$config
  100 +sed -i -e "s|p1path|\$p1img|g" \$config
  101 +sed -i -e "s|p2path|\$p2img|g" \$config
  102 +sed -i -e "s|dempath|\$dempath|g" \$config
  103 +sed -i -e "s|hsmax|$hsmax|g" \$config
  104 +sed -i -e "s|hsmin|$hsmin|g" \$config
  105 +
  106 +#run cloud removal
  107 +python $lis_app \$config
  108 +