Compare View
Commits (171)
-
Conflicts: scripts/simplification/RastersToSqlitePoint.py
-
…enerateFeatures.py scripts/common/fileUtils.py scripts/common/codeStrings.py scripts/common/NbView.py scripts/common/GenSensors.py
-
…onfig_vectorisation.cfg#
-
…SimplifyPoly.py scripts/vector-tools/RandomSelectionPolygonsAreaThreshold.py scripts/vector-tools/RandomSelectionPolygons.py
-
…rmonisation.py scripts/vector-tools/MultiPolyToPoly.py scripts/vector-tools/MergeFiles.py scripts/vector-tools/ForestDiff.py scripts/vector-tools/Difference.py scripts/vector-tools/DeleteDuplicateGeometries.py scripts/vector-tools/CountNbPolByAtt.py scripts/vector-tools/ConditionalFieldRecode.py scripts/vector-tools/BufferOgr.py
-
…li.computeFeatures (useful if we don't use gapFilling)
Showing
69 changed files
Show diff stats
.gitignore
app/iota2FeatureExtraction.cxx
... | ... | @@ -73,7 +73,7 @@ private: |
73 | 73 | AddParameter(ParameterType_Int, "swir", |
74 | 74 | "Index for the SWIR band (starting at 1)."); |
75 | 75 | AddParameter(ParameterType_Float, "indfact", |
76 | - "Multiplicative factor for nomalized indices (default = 1000)."); | |
76 | + "Multiplicative factor for normalized indices (default = 1000)."); | |
77 | 77 | MandatoryOff("indfact"); |
78 | 78 | AddParameter(ParameterType_Float, "nodata", |
79 | 79 | "No data value (default = -10000)."); |
... | ... | @@ -91,6 +91,9 @@ private: |
91 | 91 | AddParameter(ParameterType_Empty, "keepduplicates", "Keep duplicate relative reflectances (true/false). Default value is false"); |
92 | 92 | MandatoryOff("keepduplicates"); |
93 | 93 | |
94 | + AddParameter(ParameterType_Empty, "acorfeat", "Apply atmospherically corrected features http://www.cesbio.ups-tlse.fr/multitemp/?p=12746 (true/false). Default value is false"); | |
95 | + MandatoryOff("acorfeat"); | |
96 | + | |
94 | 97 | |
95 | 98 | |
96 | 99 | AddRAMParameter(); |
... | ... | @@ -144,6 +147,11 @@ private: |
144 | 147 | std::cout << " relative index " << pars.ReferenceIndex << " \n"; |
145 | 148 | } |
146 | 149 | |
150 | + if(IsParameterEnabled("acorfeat")) | |
151 | + { | |
152 | + std::cout << " Atmospherically corrected features \n"; | |
153 | + pars.ACorFeat = true; | |
154 | + } | |
147 | 155 | |
148 | 156 | auto fef = FeatureExtractionFunctorType(pars); |
149 | 157 | m_FeatureExtractionFilter = FeatureExtractionFilterType::New(); | ... | ... |
config/Config_4Tuiles_Multi_FUS_Confidence.cfg
doc/running_iota.org
... | ... | @@ -210,6 +210,8 @@ Today, features computable are : NDVI, NDWI and the brightness. Only two sensors |
210 | 210 | | additionalFeatures | user features definition | must be a bandMath (OTB) expression, comma splited | additionalFeatures:"b1+b2,(b1-b2)/(b1+b2)" | |
211 | 211 | | useAdditionalFeatures | flag to indicate if the chain must use 'additionalFeatures' | must be 'True' or 'False' | useAdditionalFeatures:'False' | |
212 | 212 | | writeOutputs | flag to indicate if temporary files must be written on disk (faster if set to 'False') | must be 'True' or 'False' | writeOutputs:'False' | 
213 | +| useGapFilling | flag to use temporal interpolation | must be 'True' or 'False' | useGapFilling : 'True' | | |
214 | + | |
213 | 215 | |
214 | 216 | |
215 | 217 | ... | ... |
include/iota2FeatureExtraction.h
... | ... | @@ -64,217 +64,231 @@ private: |
64 | 64 | }; |
65 | 65 | |
66 | 66 | template <typename T> |
67 | -constexpr T normalized_index(T refl, T refrefl, T epsilon=10e-6) | |
67 | +constexpr T normalized_index(const T refl, const T refrefl, | |
68 | + const bool acorfeat=false, | |
69 | + const T acorepsilon=0.05, | |
70 | + const T epsilon=10e-6) | |
68 | 71 | { |
69 | - return std::fabs(refl+refrefl)<epsilon? | |
72 | + if(!acorfeat) | |
73 | + { | |
74 | + return std::fabs(refl+refrefl)<epsilon? | |
70 | 75 | T{0}:(refl-refrefl)/(refl+refrefl); |
76 | + } | |
77 | + else | |
78 | + { | |
79 | + return (refl-refrefl-acorepsilon)/(refl+refrefl+acorepsilon); | |
80 | + } | |
71 | 81 | } |
72 | 82 | |
73 | -template <typename PixelType> | |
74 | -class FeatureExtractionFunctor | |
75 | -{ | |
76 | -public: | |
77 | - using ValueType = typename PixelType::ValueType; | |
78 | - using VectorType = std::vector<ValueType>; | |
79 | - | |
80 | - struct Parameters { | |
81 | - size_t ComponentsPerDate{1}; | |
82 | - size_t RedIndex{3}; | |
83 | - size_t NIRIndex{4}; | |
84 | - size_t SWIRIndex{5}; | |
85 | - bool RelativeReflectances{false}; | |
86 | - size_t ReferenceIndex{3}; | |
87 | - bool RemoveDuplicates{true}; | |
88 | - ValueType NormalizedIndexFactor{1000}; | |
89 | - ValueType NoDataValue{-10000}; | |
90 | - size_t NumberOfInputComponents{1}; | |
91 | - bool CopyInputBands{false}; | |
92 | - }; | |
93 | - FeatureExtractionFunctor() = default; | |
94 | - FeatureExtractionFunctor(Parameters pars) | |
95 | - : m_ComponentsPerDate{pars.ComponentsPerDate}, m_RedIndex{pars.RedIndex}, | |
96 | - m_NIRIndex{pars.NIRIndex}, m_SWIRIndex{pars.SWIRIndex}, | |
97 | - m_RelativeReflectances{pars.RelativeReflectances}, | |
98 | - m_ReferenceIndex{pars.ReferenceIndex}, | |
99 | - m_RemoveDuplicates{pars.RemoveDuplicates}, | |
100 | - m_NormalizedIndexFactor{pars.NormalizedIndexFactor}, | |
101 | - m_NoDataValue{pars.NoDataValue}, | |
102 | - m_NumberOfInputComponents{pars.NumberOfInputComponents}, | |
103 | - m_CopyInputBands{pars.CopyInputBands} | |
83 | + template <typename PixelType> | |
84 | + class FeatureExtractionFunctor | |
104 | 85 | { |
105 | - m_NumberOfDates = m_NumberOfInputComponents/m_ComponentsPerDate; | |
106 | - UpdateNumberOfFeatures(); | |
107 | - m_NumberOfOutputComponents = ( m_NumberOfFeatures + | |
108 | - (m_CopyInputBands? | |
109 | - m_ComponentsPerDate:0))*m_NumberOfDates; | |
110 | - const auto max_index_band = std::max({m_RedIndex, m_NIRIndex, m_SWIRIndex}); | |
111 | - if(max_index_band > m_ComponentsPerDate) | |
112 | - throw std::domain_error("Band indices and components per date are not coherent."); | |
113 | - }; | |
86 | + public: | |
87 | + using ValueType = typename PixelType::ValueType; | |
88 | + using VectorType = std::vector<ValueType>; | |
114 | 89 | |
115 | - PixelType operator()(const PixelType& p) | |
116 | - { | |
117 | - if(p.GetSize()%m_ComponentsPerDate != 0) | |
118 | - throw std::domain_error("Pixel size incoherent with number of components per date."); | |
119 | - PixelType result(m_NumberOfOutputComponents); | |
120 | - //use std vectors instead of pixels | |
121 | - const auto inVec = VectorType(p.GetDataPointer(), | |
122 | - p.GetDataPointer()+p.GetSize()); | |
123 | - //copy the spectral bands | |
124 | - auto outVec = VectorType(m_NumberOfOutputComponents); | |
125 | - //copy the input reflectances | |
126 | - if(m_CopyInputBands) | |
127 | - { | |
128 | - AddReflectances(inVec, outVec); | |
129 | - } | |
90 | + struct Parameters { | |
91 | + size_t ComponentsPerDate{1}; | |
92 | + size_t RedIndex{3}; | |
93 | + size_t NIRIndex{4}; | |
94 | + size_t SWIRIndex{5}; | |
95 | + bool RelativeReflectances{false}; | |
96 | + size_t ReferenceIndex{3}; | |
97 | + bool RemoveDuplicates{true}; | |
98 | + ValueType NormalizedIndexFactor{1000}; | |
99 | + ValueType NoDataValue{-10000}; | |
100 | + size_t NumberOfInputComponents{1}; | |
101 | + bool CopyInputBands{false}; | |
102 | + bool ACorFeat{false}; | |
103 | + }; | |
104 | + FeatureExtractionFunctor() = default; | |
105 | + FeatureExtractionFunctor(Parameters pars) | |
106 | + : m_ComponentsPerDate{pars.ComponentsPerDate}, m_RedIndex{pars.RedIndex}, | |
107 | + m_NIRIndex{pars.NIRIndex}, m_SWIRIndex{pars.SWIRIndex}, | |
108 | + m_RelativeReflectances{pars.RelativeReflectances}, | |
109 | + m_ReferenceIndex{pars.ReferenceIndex}, | |
110 | + m_RemoveDuplicates{pars.RemoveDuplicates}, | |
111 | + m_NormalizedIndexFactor{pars.NormalizedIndexFactor}, | |
112 | + m_NoDataValue{pars.NoDataValue}, | |
113 | + m_NumberOfInputComponents{pars.NumberOfInputComponents}, | |
114 | + m_CopyInputBands{pars.CopyInputBands}, | |
115 | + m_ACorFeat{pars.ACorFeat} | |
116 | + { | |
117 | + m_NumberOfDates = m_NumberOfInputComponents/m_ComponentsPerDate; | |
118 | + UpdateNumberOfFeatures(); | |
119 | + m_NumberOfOutputComponents = ( m_NumberOfFeatures + | |
120 | + (m_CopyInputBands? | |
121 | + m_ComponentsPerDate:0))*m_NumberOfDates; | |
122 | + const auto max_index_band = std::max({m_RedIndex, m_NIRIndex, m_SWIRIndex}); | |
123 | + if(max_index_band > m_ComponentsPerDate) | |
124 | + throw std::domain_error("Band indices and components per date are not coherent."); | |
125 | + }; | |
130 | 126 | |
131 | - ComputeFeatures(inVec, outVec); | |
127 | + PixelType operator()(const PixelType& p) | |
128 | + { | |
129 | + if(p.GetSize()%m_ComponentsPerDate != 0) | |
130 | + throw std::domain_error("Pixel size incoherent with number of components per date."); | |
131 | + PixelType result(m_NumberOfOutputComponents); | |
132 | + //use std vectors instead of pixels | |
133 | + const auto inVec = VectorType(p.GetDataPointer(), | |
134 | + p.GetDataPointer()+p.GetSize()); | |
135 | + //copy the spectral bands | |
136 | + auto outVec = VectorType(m_NumberOfOutputComponents); | |
137 | + //copy the input reflectances | |
138 | + if(m_CopyInputBands) | |
139 | + { | |
140 | + AddReflectances(inVec, outVec); | |
141 | + } | |
142 | + | |
143 | + ComputeFeatures(inVec, outVec); | |
132 | 144 | |
133 | - //convert the result to a pixel | |
134 | - for(size_t i=0; i<m_NumberOfOutputComponents; i++) | |
135 | - result[i] = outVec[i]; | |
136 | - return result; | |
137 | - } | |
145 | + //convert the result to a pixel | |
146 | + for(size_t i=0; i<m_NumberOfOutputComponents; i++) | |
147 | + result[i] = outVec[i]; | |
148 | + return result; | |
149 | + } | |
138 | 150 | |
139 | - bool operator!=(FeatureExtractionFunctor<PixelType> f) | |
140 | - { | |
141 | - return m_ComponentsPerDate != f.m_ComponentsPerDate; | |
142 | - } | |
151 | + bool operator!=(FeatureExtractionFunctor<PixelType> f) | |
152 | + { | |
153 | + return m_ComponentsPerDate != f.m_ComponentsPerDate; | |
154 | + } | |
143 | 155 | |
144 | - size_t GetNumberOfOutputComponents() const | |
145 | - { | |
146 | - return m_NumberOfOutputComponents; | |
147 | - } | |
156 | + size_t GetNumberOfOutputComponents() const | |
157 | + { | |
158 | + return m_NumberOfOutputComponents; | |
159 | + } | |
148 | 160 | |
149 | -protected: | |
150 | - inline void UpdateNumberOfFeatures() | |
151 | - { | |
152 | - if(m_SWIRIndex==0) --m_NumberOfFeatures; | |
153 | - if((m_RelativeReflectances && m_RemoveDuplicates && | |
154 | - (m_ReferenceIndex==m_RedIndex || m_ReferenceIndex==m_NIRIndex))) | |
155 | - --m_NumberOfFeatures; | |
156 | - } | |
161 | + protected: | |
162 | + inline void UpdateNumberOfFeatures() | |
163 | + { | |
164 | + if(m_SWIRIndex==0) --m_NumberOfFeatures; | |
165 | + if((m_RelativeReflectances && m_RemoveDuplicates && | |
166 | + (m_ReferenceIndex==m_RedIndex || m_ReferenceIndex==m_NIRIndex))) | |
167 | + --m_NumberOfFeatures; | |
168 | + } | |
157 | 169 | |
158 | - inline | |
159 | - void AddReflectances(const VectorType& inVec, VectorType& outVec) | |
160 | - { | |
161 | - if(!m_RelativeReflectances) | |
162 | - { | |
163 | - std::copy(inVec.cbegin(), inVec.cend(), outVec.begin()); | |
164 | - } | |
165 | - else | |
166 | - { | |
167 | - AddRelativeReflectances(inVec, outVec); | |
168 | - } | |
169 | - } | |
170 | + inline | |
171 | + void AddReflectances(const VectorType& inVec, VectorType& outVec) | |
172 | + { | |
173 | + if(!m_RelativeReflectances) | |
174 | + { | |
175 | + std::copy(inVec.cbegin(), inVec.cend(), outVec.begin()); | |
176 | + } | |
177 | + else | |
178 | + { | |
179 | + AddRelativeReflectances(inVec, outVec); | |
180 | + } | |
181 | + } | |
170 | 182 | |
171 | - inline | |
172 | - void AddRelativeReflectances(const VectorType& inVec, VectorType& outVec) | |
173 | - { | |
174 | - for(size_t d=0; d<m_NumberOfDates; ++d) | |
175 | - { | |
176 | - for(size_t c=0; c<m_ComponentsPerDate; ++c) | |
183 | + inline | |
184 | + void AddRelativeReflectances(const VectorType& inVec, VectorType& outVec) | |
185 | + { | |
186 | + for(size_t d=0; d<m_NumberOfDates; ++d) | |
177 | 187 | { |
178 | - const size_t date_offset = m_ComponentsPerDate*d; | |
179 | - const size_t position = c+date_offset; | |
180 | - const size_t refrefl_position = m_ReferenceIndex-1+date_offset; | |
181 | - if(position != refrefl_position) | |
182 | - { | |
183 | - outVec[position] = normalized_index(inVec[position], | |
184 | - inVec[refrefl_position]) * | |
185 | - m_NormalizedIndexFactor; | |
186 | - } | |
187 | - else | |
188 | + for(size_t c=0; c<m_ComponentsPerDate; ++c) | |
188 | 189 | { |
189 | - outVec[position] = inVec[position]; | |
190 | + const size_t date_offset = m_ComponentsPerDate*d; | |
191 | + const size_t position = c+date_offset; | |
192 | + const size_t refrefl_position = m_ReferenceIndex-1+date_offset; | |
193 | + if(position != refrefl_position) | |
194 | + { | |
195 | + outVec[position] = normalized_index(inVec[position], | |
196 | + inVec[refrefl_position], | |
197 | + m_ACorFeat) * | |
198 | + m_NormalizedIndexFactor; | |
199 | + } | |
200 | + else | |
201 | + { | |
202 | + outVec[position] = inVec[position]; | |
203 | + } | |
190 | 204 | } |
191 | 205 | } |
192 | - } | |
193 | - } | |
206 | + } | |
194 | 207 | |
195 | - inline | |
196 | - void ComputeFeatures(const VectorType& inVec, VectorType& outVec) | |
197 | - { | |
198 | - size_t copyOffset = (m_CopyInputBands?m_NumberOfInputComponents:0); | |
199 | - size_t date_counter{0}; | |
200 | - auto inIt = inVec.cbegin(); | |
201 | - while(inIt != inVec.cend()) | |
202 | - { | |
203 | - //check for invalid values | |
204 | - if(std::any_of(inIt, inIt+m_ComponentsPerDate, | |
205 | - [&](ValueType x) | |
206 | - { | |
207 | - return std::fabs(x - m_NoDataValue)<0.1; | |
208 | - })) | |
208 | + inline | |
209 | + void ComputeFeatures(const VectorType& inVec, VectorType& outVec) | |
210 | + { | |
211 | + size_t copyOffset = (m_CopyInputBands?m_NumberOfInputComponents:0); | |
212 | + size_t date_counter{0}; | |
213 | + auto inIt = inVec.cbegin(); | |
214 | + while(inIt != inVec.cend()) | |
209 | 215 | { |
210 | - for(size_t feat=0; feat<m_NumberOfFeatures; ++feat) | |
216 | + //check for invalid values | |
217 | + if(std::any_of(inIt, inIt+m_ComponentsPerDate, | |
218 | + [&](ValueType x) | |
219 | + { | |
220 | + return std::fabs(x - m_NoDataValue)<0.1; | |
221 | + })) | |
211 | 222 | { |
212 | - outVec[copyOffset+m_NumberOfDates*feat+date_counter] = m_NoDataValue; | |
223 | + for(size_t feat=0; feat<m_NumberOfFeatures; ++feat) | |
224 | + { | |
225 | + outVec[copyOffset+m_NumberOfDates*feat+date_counter] = m_NoDataValue; | |
226 | + } | |
213 | 227 | } |
214 | - } | |
215 | - else | |
216 | - { | |
217 | - //compute the features | |
218 | - const auto red = *(inIt+m_RedIndex-1); | |
219 | - const auto nir = *(inIt+m_NIRIndex-1); | |
220 | - const auto swir = *(inIt+(m_SWIRIndex>0?m_SWIRIndex:1)-1); | |
221 | - VectorType tmpVec(m_ComponentsPerDate); | |
222 | - std::transform(inIt, inIt+m_ComponentsPerDate,tmpVec.begin(), | |
223 | - [](decltype(*inIt)x){ return x*x;}); | |
224 | - const auto brightness = std::sqrt(std::accumulate(tmpVec.begin(), tmpVec.end(), | |
225 | - ValueType{0})); | |
226 | - //append the features | |
227 | - size_t featureOffset{0}; | |
228 | - //ndvi | |
229 | - featureOffset = AddNormalizedIndexMaybe(nir, red, m_RedIndex, featureOffset, | |
230 | - copyOffset, outVec, date_counter); | |
231 | - //ndwi | |
232 | - if(m_SWIRIndex!=0) | |
228 | + else | |
233 | 229 | { |
234 | - featureOffset = AddNormalizedIndexMaybe(swir, nir, m_NIRIndex, featureOffset, | |
230 | + //compute the features | |
231 | + const auto red = *(inIt+m_RedIndex-1); | |
232 | + const auto nir = *(inIt+m_NIRIndex-1); | |
233 | + const auto swir = *(inIt+(m_SWIRIndex>0?m_SWIRIndex:1)-1); | |
234 | + VectorType tmpVec(m_ComponentsPerDate); | |
235 | + std::transform(inIt, inIt+m_ComponentsPerDate,tmpVec.begin(), | |
236 | + [](decltype(*inIt)x){ return x*x;}); | |
237 | + const auto brightness = std::sqrt(std::accumulate(tmpVec.begin(), tmpVec.end(), | |
238 | + ValueType{0})); | |
239 | + //append the features | |
240 | + size_t featureOffset{0}; | |
241 | + //ndvi | |
242 | + featureOffset = AddNormalizedIndexMaybe(nir, red, m_RedIndex, featureOffset, | |
235 | 243 | copyOffset, outVec, date_counter); |
244 | + //ndwi | |
245 | + if(m_SWIRIndex!=0) | |
246 | + { | |
247 | + featureOffset = AddNormalizedIndexMaybe(swir, nir, m_NIRIndex, featureOffset, | |
248 | + copyOffset, outVec, date_counter); | |
249 | + } | |
250 | + outVec[copyOffset+m_NumberOfDates*featureOffset+date_counter] = brightness; | |
236 | 251 | } |
237 | - outVec[copyOffset+m_NumberOfDates*featureOffset+date_counter] = brightness; | |
252 | + //move to the next date | |
253 | + std::advance(inIt, m_ComponentsPerDate); | |
254 | + ++date_counter; | |
238 | 255 | } |
239 | - //move to the next date | |
240 | - std::advance(inIt, m_ComponentsPerDate); | |
241 | - ++date_counter; | |
242 | - } | |
243 | - } | |
256 | + } | |
244 | 257 | |
245 | - inline | |
246 | - size_t AddNormalizedIndexMaybe(ValueType refl, ValueType refrefl, | |
247 | - size_t refindex, size_t featureOffset, | |
248 | - size_t copyOffset, VectorType& outVec, | |
249 | - size_t date_counter) | |
250 | - { | |
251 | - auto result = featureOffset; | |
252 | - if(!(m_RelativeReflectances && m_RemoveDuplicates && m_ReferenceIndex == refindex)) | |
253 | - { | |
254 | - outVec[copyOffset+m_NumberOfDates*featureOffset+date_counter] = | |
255 | - normalized_index(refl, refrefl) * m_NormalizedIndexFactor; | |
256 | - ++result; | |
257 | - } | |
258 | - return result; | |
259 | - } | |
258 | + inline | |
259 | + size_t AddNormalizedIndexMaybe(ValueType refl, ValueType refrefl, | |
260 | + size_t refindex, size_t featureOffset, | |
261 | + size_t copyOffset, VectorType& outVec, | |
262 | + size_t date_counter) | |
263 | + { | |
264 | + auto result = featureOffset; | |
265 | + if(!(m_RelativeReflectances && m_RemoveDuplicates && m_ReferenceIndex == refindex)) | |
266 | + { | |
267 | + outVec[copyOffset+m_NumberOfDates*featureOffset+date_counter] = | |
268 | + normalized_index(refl, refrefl, m_ACorFeat) * m_NormalizedIndexFactor; | |
269 | + ++result; | |
270 | + } | |
271 | + return result; | |
272 | + } | |
260 | 273 | |
261 | - size_t m_ComponentsPerDate; | |
262 | - size_t m_RedIndex; | |
263 | - size_t m_NIRIndex; | |
264 | - size_t m_SWIRIndex; | |
265 | - bool m_RelativeReflectances; | |
266 | - size_t m_ReferenceIndex; //which reflectance is used as reference if | |
267 | - //relative reflectances are used | |
268 | - bool m_RemoveDuplicates; //If relative reflectances, NDVI or NDWI | |
269 | - //may be redundant | |
270 | - ValueType m_NormalizedIndexFactor; | |
271 | - ValueType m_NoDataValue; | |
272 | - size_t m_NumberOfInputComponents; | |
273 | - bool m_CopyInputBands; | |
274 | - size_t m_NumberOfOutputComponents; | |
275 | - size_t m_NumberOfDates; | |
276 | - size_t m_NumberOfFeatures = 3; | |
277 | -}; | |
274 | + size_t m_ComponentsPerDate; | |
275 | + size_t m_RedIndex; | |
276 | + size_t m_NIRIndex; | |
277 | + size_t m_SWIRIndex; | |
278 | + bool m_RelativeReflectances; | |
279 | + size_t m_ReferenceIndex; //which reflectance is used as reference if | |
280 | + //relative reflectances are used | |
281 | + bool m_RemoveDuplicates; //If relative reflectances, NDVI or NDWI | |
282 | + //may be redundant | |
283 | + ValueType m_NormalizedIndexFactor; | |
284 | + ValueType m_NoDataValue; | |
285 | + size_t m_NumberOfInputComponents; | |
286 | + bool m_CopyInputBands; | |
287 | + bool m_ACorFeat; | |
288 | + size_t m_NumberOfOutputComponents; | |
289 | + size_t m_NumberOfDates; | |
290 | + size_t m_NumberOfFeatures = 3; | |
291 | + }; | |
278 | 292 | } // end namespace iota2 |
279 | 293 | |
280 | 294 | ... | ... |
scripts/common/GenJobLaunchOutStat.py
... | ... | @@ -44,7 +44,7 @@ def genJob(jobPath,testPath,logPath,pathConf): |
44 | 44 | module load python/2.7.12\n\ |
45 | 45 | #module remove xerces/2.7\n\ |
46 | 46 | #module load xerces/2.8\n\ |
47 | -module load pygdal/2.1.0-py2.7\n\ | |
47 | +module load gcc/6.3.0\n\ | |
48 | 48 | \n\ |
49 | 49 | FileConfig=%s\n\ |
50 | 50 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -72,7 +72,7 @@ python outStats.py -tile ${ListeTuile[${PBS_ARRAY_INDEX}]} -conf $FileConfig --s |
72 | 72 | module load python/2.7.12\n\ |
73 | 73 | #module remove xerces/2.7\n\ |
74 | 74 | #module load xerces/2.8\n\ |
75 | -module load pygdal/2.1.0-py2.7\n\ | |
75 | +module load gcc/6.3.0\n\ | |
76 | 76 | \n\ |
77 | 77 | FileConfig=%s\n\ |
78 | 78 | export ITK_AUTOLOAD_PATH=""\n\ | ... | ... |
scripts/common/GenSensors.py
... | ... | @@ -90,7 +90,8 @@ class Sensor(object): |
90 | 90 | |
91 | 91 | fList = [] |
92 | 92 | |
93 | - for image in glob.glob(self.path+self.struct_path+self.imType): | |
93 | + glob_path = (self.path+self.struct_path+self.imType).replace("[","[[]") | |
94 | + for image in glob.glob(glob_path): | |
94 | 95 | imagePath = image.split("/") |
95 | 96 | imageName = imagePath[-1].split("_") |
96 | 97 | imageList.append(imageName) |
... | ... | @@ -106,7 +107,7 @@ class Sensor(object): |
106 | 107 | s = "_" |
107 | 108 | nameIm = s.join(imSorted) |
108 | 109 | name = self.struct_path+nameIm#imSorted |
109 | - for im in glob.glob(self.path+"/"+name): | |
110 | + for im in glob.glob((self.path+"/"+name).replace("[","[[]")): | |
110 | 111 | file.write(im) |
111 | 112 | file.write('\n') |
112 | 113 | fList.append(im) |
... | ... | @@ -123,7 +124,7 @@ class Sensor(object): |
123 | 124 | imageList = [] |
124 | 125 | fList = [] |
125 | 126 | |
126 | - for image in glob.glob(self.pathRes+"/*"+self.imType): | |
127 | + for image in glob.glob((self.pathRes+"/*"+self.imType).replace("[","[[]")): | |
127 | 128 | imagePath = image.split("/") |
128 | 129 | imageName = imagePath[-1].split("_") |
129 | 130 | imageList.append(imageName) |
... | ... | @@ -138,7 +139,7 @@ class Sensor(object): |
138 | 139 | s = "_" |
139 | 140 | nameIm = s.join(imSorted) |
140 | 141 | #name = imSorted |
141 | - for im in glob.glob(self.pathRes+"/"+nameIm): | |
142 | + for im in glob.glob((self.pathRes+"/"+nameIm).replace("[","[[]")): | |
142 | 143 | #file.write(im) |
143 | 144 | #file.write('\n') |
144 | 145 | fList.append(im) |
... | ... | @@ -161,45 +162,45 @@ class Sensor(object): |
161 | 162 | s = "_" |
162 | 163 | nameIm = s.join(imSorted) |
163 | 164 | print self.pathmask+nameIm |
164 | - liste_Sort.append(glob.glob(self.pathmask+nameIm)[0]) | |
165 | + liste_Sort.append(glob.glob((self.pathmask+nameIm).replace("[","[[]"))[0]) | |
165 | 166 | |
166 | 167 | return liste_Sort |
167 | 168 | |
168 | 169 | def getList_NoDataMask(self): |
169 | - liste_nodata = glob.glob(self.pathmask+"/*"+self.nodata) | |
170 | + liste_nodata = glob.glob((self.pathmask+"/*"+self.nodata).replace("[","[[]")) | |
170 | 171 | liste = self.sortMask(liste_nodata) |
171 | 172 | return liste |
172 | 173 | |
173 | 174 | |
174 | 175 | def getList_CloudMask(self): |
175 | - liste_cloud = glob.glob(self.pathmask+"/*"+self.nuages) | |
176 | + liste_cloud = glob.glob((self.pathmask+"/*"+self.nuages).replace("[","[[]")) | |
176 | 177 | liste = self.sortMask(liste_cloud) |
177 | 178 | return liste |
178 | 179 | |
179 | 180 | |
180 | 181 | def getList_SatMask(self): |
181 | - liste_sat = glob.glob(self.pathmask+"/*"+self.saturation) | |
182 | + liste_sat = glob.glob((self.pathmask+"/*"+self.saturation).replace("[","[[]")) | |
182 | 183 | liste = self.sortMask(liste_sat) |
183 | 184 | return liste |
184 | 185 | |
185 | 186 | def getList_DivMask(self): |
186 | 187 | print "pathsearchmask",self.pathmask+"/*"+self.div |
187 | - liste_div = glob.glob(self.pathmask+"/*"+self.div) | |
188 | + liste_div = glob.glob((self.pathmask+"/*"+self.div).replace("[","[[]")) | |
188 | 189 | liste = self.sortMask(liste_div) |
189 | 190 | return liste |
190 | 191 | |
191 | 192 | def getList_ResCloudMask(self): |
192 | - liste_cloud = glob.glob(self.pathRes+"/*"+self.nuages) | |
193 | + liste_cloud = glob.glob((self.pathRes+"/*"+self.nuages).replace("[","[[]")) | |
193 | 194 | liste = self.sortMask(liste_cloud) |
194 | 195 | return liste |
195 | 196 | |
196 | 197 | def getList_ResSatMask(self): |
197 | - liste_sat = glob.glob(self.pathRes+"/*"+self.saturation) | |
198 | + liste_sat = glob.glob((self.pathRes+"/*"+self.saturation).replace("[","[[]")) | |
198 | 199 | liste = self.sortMask(liste_sat) |
199 | 200 | return liste |
200 | 201 | |
201 | 202 | def getList_ResDivMask(self): |
202 | - liste_div = glob.glob(self.pathRes+"/*"+self.div) | |
203 | + liste_div = glob.glob((self.pathRes+"/*"+self.div).replace("[","[[]")) | |
203 | 204 | liste = self.sortMask(liste_div) |
204 | 205 | return liste |
205 | 206 | |
... | ... | @@ -265,7 +266,7 @@ class Sensor(object): |
265 | 266 | expr += "+im"+str(i+1)+"b1" |
266 | 267 | else: |
267 | 268 | #expr = "+".join([ "im"+str(i+1)+"b1" for i in range(len(mlist))]) |
268 | - expr = "+".join([ "(1-im"+str(i+1)+"b1)" for i in range(len(mlist))]) | |
269 | + expr = "+".join([ "(1-im"+str(i+1)+"b1)" for i in range(len(mlist))]) | |
269 | 270 | |
270 | 271 | listMask_s = indBinary |
271 | 272 | if self.name == 'Sentinel2':listMask_s = mlist | ... | ... |
scripts/common/LaunchTraining.py
... | ... | @@ -74,15 +74,19 @@ def writeConfigName(r,tileList,configfile): |
74 | 74 | configModel.write("\n\t{\n\tmodelName:'"+r+"'\n\ttilesList:'"+tileList+"'\n\t}") |
75 | 75 | configModel.close() |
76 | 76 | |
77 | -def buildTrainCmd_points(r,paths,classif,options,dataField,out,seed,stat,pathlog,groundTruth): | |
77 | +def buildTrainCmd_points(r,paths,classif,options,dataField,out,seed,stat,pathlog,shape_ref): | |
78 | 78 | |
79 | + """ | |
80 | + shape_ref [param] [string] path to a shape used to determine how many fields | 
81 | + are already present before adding features | |
82 | + """ | |
79 | 83 | cmd = "otbcli_TrainVectorClassifier -io.vd " |
80 | 84 | if paths.count("learn")!=0: |
81 | 85 | cmd = cmd +" "+paths |
82 | 86 | |
83 | 87 | cmd = cmd+" -classifier "+classif+" "+options+" -cfield "+dataField.lower()+" -io.out "+out+"/model_"+str(r)+"_seed_"+str(seed)+".txt" |
84 | 88 | |
85 | - nb_origin_fields = len(fu.getAllFieldsInShape(groundTruth))+1 | |
89 | + nb_origin_fields = len(fu.getAllFieldsInShape(shape_ref))+1 | |
86 | 90 | features_labels = " ".join(fu.getAllFieldsInShape(paths,"SQLite")[nb_origin_fields:]) |
87 | 91 | cmd = cmd+" -feat "+features_labels |
88 | 92 | |
... | ... | @@ -117,6 +121,79 @@ def buildTrainCmd_poly(r,paths,pathToTiles,Stack_ind,classif,options,dataField,o |
117 | 121 | cmd = cmd +" > "+pathlog+"/LOG_model_"+str(r)+"_seed_"+str(seed)+".out" |
118 | 122 | return cmd |
119 | 123 | |
124 | + | |
125 | +def models_in_tiles(vectors): | |
126 | + """ | |
127 | + usage : used to know in which tile models are present | 
128 | + """ | |
129 | + | |
130 | + #const | |
131 | + #model's position, if training shape is split by "_" | |
132 | + posModel = -3 | |
133 | + | |
134 | + output = "AllModel:\n[" | |
135 | + for vector in vectors: | |
136 | + model = os.path.split(vector)[-1].split("_")[posModel] | |
137 | + tiles = fu.getFieldElement(vector, driverName="SQLite", field="tile_o", | |
138 | + mode="unique", elemType="str") | |
139 | + | |
140 | + tmp = "modelName: '{}'\n\ttilesList: '{}'".format(model, "_".join(tiles)) | |
141 | + output += "\n\t{\n\t" + tmp + "\n\t}\n\t" | |
142 | + output+="\n]" | |
143 | + return output | |
144 | + | |
145 | + | |
146 | +def config_model(outputPath): | |
147 | + """ | |
148 | + usage : determine which model will classify which tile | 
149 | + """ | |
150 | + #const | |
151 | + region_field = "region" | |
152 | + region_split_field = "DN" | |
153 | + output = None | |
154 | + posTile = 0 | |
155 | + formatting_vec_dir = os.path.join(outputPath, "formattingVectors") | |
156 | + samples = fu.FileSearch_AND(formatting_vec_dir,True, "seed_0", ".shp") | |
157 | + | |
158 | + #init | |
159 | + all_regions = [] | |
160 | + for sample in samples: | |
161 | + tile_name = os.path.splitext(os.path.basename(sample))[0].split("_")[posTile] | |
162 | + regions = fu.getFieldElement(sample, driverName="ESRI Shapefile", field=region_field, mode="unique", | |
163 | + elemType="str") | |
164 | + for region in regions: | |
165 | + all_regions.append((region, tile_name)) | |
166 | + | |
167 | + #{'model_name':[TileName, TileName...],'...':...,...} | |
168 | + model_tiles = dict(fu.sortByFirstElem(all_regions)) | |
169 | + | |
170 | + #add tiles if they are missing by checking in /shapeRegion/ directory | |
171 | + shape_region_dir = os.path.join(outputPath, "shapeRegion") | |
172 | + shape_region_path = fu.FileSearch_AND(shape_region_dir,True, ".shp") | |
173 | + | |
174 | + #check if there is actually polygons | |
175 | + shape_regions = [elem for elem in shape_region_path if len(fu.getFieldElement(elem, | |
176 | + driverName="ESRI Shapefile", | |
177 | + field=region_split_field, | |
178 | + mode="all", | |
179 | + elemType="str"))>=1] | |
180 | + for shape_region in shape_regions: | |
181 | + tile = os.path.splitext(os.path.basename(shape_region))[0].split("_")[-1] | |
182 | + region = os.path.splitext(os.path.basename(shape_region))[0].split("_")[-2] | |
183 | + for model_name, tiles_model in model_tiles.items(): | |
184 | + if model_name.split("f")[0] == region and not tile in tiles_model: | |
185 | + tiles_model.append(tile) | |
186 | + | |
187 | + #Construct output file string | |
188 | + output = "AllModel:\n[" | |
189 | + for model_name, tiles_model in model_tiles.items(): | |
190 | + output_tmp = "\n\tmodelName:'{}'\n\ttilesList:'{}'".format(model_name, "_".join(tiles_model)) | |
191 | + output = output + "\n\t{" + output_tmp + "\n\t}" | |
192 | + output += "\n]" | |
193 | + | |
194 | + return output | |
195 | + | |
196 | + | |
120 | 197 | def launchTraining(pathShapes, cfg, pathToTiles, dataField, stat, N, |
121 | 198 | pathToCmdTrain, out, pathWd, pathlog): |
122 | 199 | |
... | ... | @@ -131,16 +208,17 @@ def launchTraining(pathShapes, cfg, pathToTiles, dataField, stat, N, |
131 | 208 | outputPath = cfg.getParam('chain', 'outputPath') |
132 | 209 | samplesMode = cfg.getParam('argTrain', 'shapeMode') |
133 | 210 | dataField = cfg.getParam('chain', 'dataField') |
134 | - groundTruth = cfg.getParam('chain', 'groundTruth') | |
135 | 211 | |
212 | + shape_ref = fu.FileSearch_AND(os.path.join(outputPath,"formattingVectors"), True, ".shp")[0] | |
136 | 213 | posModel = -3 #model's position, if training shape is split by "_" |
137 | 214 | |
138 | - Stack_ind = fu.getFeatStackName(pathConf) | |
139 | - | |
140 | 215 | pathToModelConfig = outputPath+"/config_model/configModel.cfg" |
141 | - configModel = open(pathToModelConfig,"w") | |
142 | - configModel.write("AllModel:\n[\n") | |
143 | - configModel.close() | |
216 | + | |
217 | + if not os.path.exists(pathToModelConfig): | |
218 | + tiles_model = config_model(outputPath) | |
219 | + with open(pathToModelConfig, "w") as pathToModelConfig_file: | |
220 | + pathToModelConfig_file.write(tiles_model) | |
221 | + | |
144 | 222 | for seed in range(N): |
145 | 223 | pathAppVal = fu.FileSearch_AND(pathShapes,True,"seed"+str(seed),".shp","learn") |
146 | 224 | sort = [(path.split("/")[-1].split("_")[posModel],path) for path in pathAppVal] |
... | ... | @@ -156,9 +234,7 @@ def launchTraining(pathShapes, cfg, pathToTiles, dataField, stat, N, |
156 | 234 | tmp = tmp+paths[i].split("/")[-1].split("_")[0] |
157 | 235 | names.append(tmp) |
158 | 236 | cpt = 0 |
159 | - for r,paths in sort: | |
160 | - writeConfigName(r,names[cpt],pathToModelConfig) | |
161 | - cpt+=1 | |
237 | + | |
162 | 238 | if samplesMode == "points": |
163 | 239 | pathAppVal = fu.FileSearch_AND(outputPath+"/learningSamples",True,"seed"+str(seed),".sqlite","learn") |
164 | 240 | sort = [(path.split("/")[-1].split("_")[posModel],path) for path in pathAppVal] |
... | ... | @@ -173,13 +249,8 @@ def launchTraining(pathShapes, cfg, pathToTiles, dataField, stat, N, |
173 | 249 | if os.path.exists(outStats): |
174 | 250 | os.remove(outStats) |
175 | 251 | writeStatsFromSample(paths,outStats) |
176 | - cmd = buildTrainCmd_points(r,paths,classif,options,dataField,out,seed,stat,pathlog,groundTruth) | |
252 | + cmd = buildTrainCmd_points(r,paths,classif,options,dataField,out,seed,stat,pathlog,shape_ref) | |
177 | 253 | cmd_out.append(cmd) |
178 | - | |
179 | - | |
180 | - configModel = open(pathToModelConfig,"a") | |
181 | - configModel.write("\n]\n") | |
182 | - configModel.close() | |
183 | 254 | |
184 | 255 | fu.writeCmds(pathToCmdTrain+"/train.txt",cmd_out) |
185 | 256 | |
... | ... | @@ -208,60 +279,3 @@ if __name__ == "__main__": |
208 | 279 | launchTraining(args.pathShapes, cfg, args.pathToTiles, args.dataField, |
209 | 280 | args.stat, args.N, args.pathToCmdTrain, args.out, |
210 | 281 | args.pathWd, args.pathlog) |
211 | - | |
212 | - | |
213 | - | |
214 | - | |
215 | - | |
216 | - | |
217 | - | |
218 | - | |
219 | - | |
220 | - | |
221 | - | |
222 | - | |
223 | - | |
224 | - | |
225 | - | |
226 | - | |
227 | - | |
228 | - | |
229 | - | |
230 | - | |
231 | - | |
232 | - | |
233 | - | |
234 | - | |
235 | - | |
236 | - | |
237 | - | |
238 | - | |
239 | - | |
240 | - | |
241 | - | |
242 | - | |
243 | - | |
244 | - | |
245 | - | |
246 | - | |
247 | - | |
248 | - | |
249 | - | |
250 | - | |
251 | - | |
252 | - | |
253 | - | |
254 | - | |
255 | - | |
256 | - | |
257 | - | |
258 | - | |
259 | - | |
260 | - | |
261 | - | |
262 | - | |
263 | - | |
264 | - | |
265 | - | |
266 | - | |
267 | - | ... | ... |
scripts/common/Utils.py
... | ... | @@ -12,20 +12,20 @@ |
12 | 12 | # ========================================================================= |
13 | 13 | |
14 | 14 | import os, datetime, subprocess, sys |
15 | -import logging | |
15 | +#import logging | |
16 | 16 | from timeit import default_timer as timer |
17 | 17 | |
18 | 18 | def run(cmd, desc=None, env=os.environ): |
19 | 19 | |
20 | 20 | # Get logger |
21 | - logger = logging.getLogger(__name__) | |
21 | + #logger = logging.getLogger(__name__) | |
22 | 22 | |
23 | 23 | # Log description of step if available |
24 | - if desc is not None: | |
25 | - logger.info(desc) | |
24 | + #if desc is not None: | |
25 | + # logger.info(desc) | |
26 | 26 | |
27 | 27 | # Log cmd in debug |
28 | - logger.debug(cmd) | |
28 | + #logger.debug(cmd) | |
29 | 29 | |
30 | 30 | # Create subprocess |
31 | 31 | start = timer() |
... | ... | @@ -40,15 +40,13 @@ def run(cmd, desc=None, env=os.environ): |
40 | 40 | stop = timer() |
41 | 41 | |
42 | 42 | # Log outputs |
43 | - logger.debug("out/err: {}".format(out)) | |
44 | - | |
45 | - logger.debug("Done in {} seconds".format(stop-start)) | |
46 | - | |
43 | + #logger.debug("out/err: {}".format(out)) | |
47 | 44 | |
45 | + #logger.debug("Done in {} seconds".format(stop-start)) | |
48 | 46 | |
49 | 47 | # Log error code |
50 | - if rc != 0: | |
51 | - logger.error("Command {} exited with non-zero return code {}".format(cmd,rc)) | |
48 | + #if rc != 0: | |
49 | + # logger.error("Command {} exited with non-zero return code {}".format(cmd,rc)) | |
52 | 50 | |
53 | 51 | |
54 | 52 | class Opath(object): | ... | ... |
scripts/common/bPy_ImageClassifier.py
... | ... | @@ -18,8 +18,10 @@ from config import Config |
18 | 18 | import otbApplication as otb |
19 | 19 | import fileUtils as fu |
20 | 20 | from Utils import Opath |
21 | -import prepareStack,otbAppli | |
22 | - | |
21 | +import prepareStack | |
22 | +import otbAppli | |
23 | +import generateFeatures as genFeatures | |
24 | + | |
23 | 25 | def filterOTB_output(raster,mask,output,outputType=otb.ImagePixelType_uint8): |
24 | 26 | |
25 | 27 | bandMathFilter = otb.Registry.CreateApplication("BandMath") |
... | ... | @@ -31,7 +33,8 @@ def filterOTB_output(raster,mask,output,outputType=otb.ImagePixelType_uint8): |
31 | 33 | bandMathFilter.SetParameterOutputImagePixelType("out",outputType) |
32 | 34 | bandMathFilter.ExecuteAndWriteOutput() |
33 | 35 | |
34 | -def computeClasifications(model,outputClassif,confmap,MaximizeCPU,Classifmask,stats,AllFeatures,*ApplicationList): | |
36 | +def computeClasifications(model, outputClassif, confmap, MaximizeCPU, | |
37 | + Classifmask, stats, AllFeatures): | |
35 | 38 | |
36 | 39 | classifier = otb.Registry.CreateApplication("ImageClassifier") |
37 | 40 | classifier.SetParameterInputImage("in",AllFeatures.GetParameterOutputImage("out")) |
... | ... | @@ -39,13 +42,15 @@ def computeClasifications(model,outputClassif,confmap,MaximizeCPU,Classifmask,st |
39 | 42 | classifier.SetParameterOutputImagePixelType("out",otb.ImagePixelType_uint8) |
40 | 43 | classifier.SetParameterString("confmap",confmap+"?&writegeom=false") |
41 | 44 | classifier.SetParameterString("model",model) |
45 | + | |
42 | 46 | if not MaximizeCPU: |
43 | 47 | classifier.SetParameterString("mask",Classifmask) |
44 | 48 | if stats: |
45 | 49 | classifier.SetParameterString("imstat",stats) |
50 | + | |
46 | 51 | classifier.SetParameterString("ram","5000") |
47 | 52 | return classifier,AllFeatures |
48 | - | |
53 | + | |
49 | 54 | |
50 | 55 | def launchClassification(tempFolderSerie,Classifmask,model,stats, |
51 | 56 | outputClassif,confmap,pathWd,cfg,pixType, |
... | ... | @@ -59,30 +64,23 @@ def launchClassification(tempFolderSerie,Classifmask,model,stats, |
59 | 64 | wd = pathWd |
60 | 65 | if not pathWd: |
61 | 66 | wd = featuresPath |
62 | - os.environ["ITK_GLOBAL_DEFAULT_NUMBER_OF_THREADS"] = "5" | |
63 | - AllGapFill,AllRefl,AllMask,datesInterp,realDates,dep = otbAppli.gapFilling(cfg,tile,wMode=wMode,\ | |
64 | - featuresPath=None,workingDirectory=wd) | |
65 | - if wMode: | |
66 | - for currentGapFillSensor in AllGapFill: | |
67 | - currentGapFillSensor.ExecuteAndWriteOutput() | |
68 | - else: | |
69 | - for currentGapFillSensor in AllGapFill: | |
70 | - currentGapFillSensor.Execute() | |
71 | - nbDates = [fu.getNbDateInTile(currentDateFile) for currentDateFile in datesInterp] | |
67 | + | |
68 | + try: | |
69 | + useGapFilling = ast.literal_eval(cfg.getParam('GlobChain', 'useGapFilling')) | |
70 | + except: | |
71 | + useGapFilling = True | |
72 | + | |
73 | + AllFeatures, feat_labels, dep_features = genFeatures.generateFeatures(pathWd, tile, cfg, useGapFilling=useGapFilling) | |
72 | 74 | |
73 | - AllFeatures, feat_labels, ApplicationList,a,b,c,d,e = otbAppli.computeFeatures(cfg, nbDates,tile,\ | |
74 | - AllGapFill,AllRefl,\ | |
75 | - AllMask,datesInterp,realDates) | |
76 | 75 | if wMode: |
77 | 76 | AllFeatures.ExecuteAndWriteOutput() |
78 | 77 | else: |
79 | 78 | AllFeatures.Execute() |
80 | - classifier,inputStack = computeClasifications(model,outputClassif,\ | |
81 | - confmap,MaximizeCPU,Classifmask,\ | |
82 | - stats,AllFeatures,\ | |
83 | - AllGapFill,AllRefl,AllMask,\ | |
84 | - datesInterp,realDates,\ | |
85 | - AllFeatures,ApplicationList) | |
79 | + | |
80 | + classifier,inputStack = computeClasifications(model, outputClassif, | |
81 | + confmap, MaximizeCPU, | |
82 | + Classifmask, stats, | |
83 | + AllFeatures) | |
86 | 84 | classifier.ExecuteAndWriteOutput() |
87 | 85 | if MaximizeCPU: |
88 | 86 | filterOTB_output(outputClassif,Classifmask,outputClassif) | ... | ... |
scripts/common/codeStrings.py
... | ... | @@ -23,9 +23,10 @@ parallelChainStep1='\ |
23 | 23 | \n\ |
24 | 24 | #Chargement des modules nécessaire pour la création des répertoires et des .py\n\ |
25 | 25 | module load python/2.7.12\n\ |
26 | +module load gcc/6.3.0\n\ | |
26 | 27 | #module remove xerces/2.7\n\ |
27 | 28 | #module load xerces/2.8\n\ |
28 | -\n\ | |
29 | +source /work/OT/theia/oso/OTB/otb_superbuild/otb_superbuild-6.0-Release-install/config_otb.sh\n\ | |
29 | 30 | cd %s\n\ |
30 | 31 | \n\ |
31 | 32 | #path to pythons function\n\ |
... | ... | @@ -232,7 +233,8 @@ done\n\ |
232 | 233 | \n\ |
233 | 234 | ' |
234 | 235 | parallelChainStep8_b = '\ |
235 | -id_pyVectorSampler=$(qsub -W depend=afterok:$id_cmdGenStats,block=true genJobVectorSampler.pbs)\n\ | |
236 | +id_formattingV=$(qsub -W depend=afterok:$id_cmdGenStats formatting_vectors.pbs)\n\ | |
237 | +id_pyVectorSampler=$(qsub -W depend=afterok:$id_formattingV,block=true genJobVectorSampler.pbs)\n\ | |
236 | 238 | \n\ |
237 | 239 | id_vectorSampler=$(qsub vectorSampler.pbs)\n\ |
238 | 240 | \n\ |
... | ... | @@ -265,7 +267,8 @@ done\n\ |
265 | 267 | ' |
266 | 268 | |
267 | 269 | parallelChainStep8_c = '\ |
268 | -id_pyVectorSampler=$(qsub -W depend=afterok:$id_cmdGenStats,block=true genJobVectorSampler.pbs)\n\ | |
270 | +id_formattingV=$(qsub -W depend=afterok:$id_cmdGenStats formatting_vectors.pbs)\n\ | |
271 | +id_pyVectorSampler=$(qsub -W depend=afterok:$id_formattingV,block=true genJobVectorSampler.pbs)\n\ | |
269 | 272 | \n\ |
270 | 273 | id_vectorSampler=$(qsub vectorSampler.pbs)\n\ |
271 | 274 | \n\ |
... | ... | @@ -354,7 +357,7 @@ jobGenCmdFeatures='\ |
354 | 357 | module load python/2.7.12\n\ |
355 | 358 | #module remove xerces/2.7\n\ |
356 | 359 | #module load xerces/2.8\n\ |
357 | -module load pygdal/2.1.0-py2.7\n\ | |
360 | +module load gcc/6.3.0\n\ | |
358 | 361 | \n\ |
359 | 362 | FileConfig=%s\n\ |
360 | 363 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -388,7 +391,7 @@ jobGenJobLaunchFeat='\ |
388 | 391 | module load python/2.7.12\n\ |
389 | 392 | module remove xercesf/2.7\n\ |
390 | 393 | #module load xerces/2.8\n\ |
391 | -module load pygdal/2.1.0-py2.7\n\ | |
394 | +module load gcc/6.3.0\n\ | |
392 | 395 | \n\ |
393 | 396 | FileConfig=%s\n\ |
394 | 397 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -418,7 +421,7 @@ jobEnvelope='\ |
418 | 421 | module load python/2.7.12\n\ |
419 | 422 | #module remove xerces/2.7\n\ |
420 | 423 | #module load xerces/2.8\n\ |
421 | -module load pygdal/2.1.0-py2.7\n\ | |
424 | +module load gcc/6.3.0\n\ | |
422 | 425 | \n\ |
423 | 426 | FileConfig=%s\n\ |
424 | 427 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -450,7 +453,7 @@ jobGenerateRegionShape='\ |
450 | 453 | module load python/2.7.12\n\ |
451 | 454 | #module remove xerces/2.7\n\ |
452 | 455 | #module load xerces/2.8\n\ |
453 | -module load pygdal/2.1.0-py2.7\n\ | |
456 | +module load gcc/6.3.0\n\ | |
454 | 457 | \n\ |
455 | 458 | FileConfig=%s\n\ |
456 | 459 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -482,7 +485,7 @@ jobRegionByTiles='\ |
482 | 485 | module load python/2.7.12\n\ |
483 | 486 | #module remove xerces/2.7\n\ |
484 | 487 | #module load xerces/2.8\n\ |
485 | -module load pygdal/2.1.0-py2.7\n\ | |
488 | +module load gcc/6.3.0\n\ | |
486 | 489 | \n\ |
487 | 490 | FileConfig=%s\n\ |
488 | 491 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -511,7 +514,7 @@ jobExtractactData='\ |
511 | 514 | module load python/2.7.12\n\ |
512 | 515 | #module remove xerces/2.7\n\ |
513 | 516 | #module load xerces/2.8\n\ |
514 | -module load pygdal/2.1.0-py2.7\n\ | |
517 | +module load gcc/6.3.0\n\ | |
515 | 518 | \n\ |
516 | 519 | FileConfig=%s\n\ |
517 | 520 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -540,7 +543,7 @@ jobGenJobDataAppVal='\ |
540 | 543 | module load python/2.7.12\n\ |
541 | 544 | #module remove xerces/2.7\n\ |
542 | 545 | #module load xerces/2.8\n\ |
543 | -module load pygdal/2.1.0-py2.7\n\ | |
546 | +module load gcc/6.3.0\n\ | |
544 | 547 | \n\ |
545 | 548 | FileConfig=%s\n\ |
546 | 549 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -567,7 +570,7 @@ jobExtractStatsByPoly='\ |
567 | 570 | #PBS -e %s/genJobExtractStatsByPol_err.log\n\ |
568 | 571 | \n\ |
569 | 572 | module load python/2.7.12\n\ |
570 | -module load pygdal/2.1.0-py2.7\n\ | |
573 | +module load gcc/6.3.0\n\ | |
571 | 574 | \n\ |
572 | 575 | FileConfig=%s\n\ |
573 | 576 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -596,7 +599,7 @@ jobGenJobVectorSampler='\ |
596 | 599 | module load python/2.7.12\n\ |
597 | 600 | #module remove xerces/2.7\n\ |
598 | 601 | #module load xerces/2.8\n\ |
599 | -module load pygdal/2.1.0-py2.7\n\ | |
602 | +module load gcc/6.3.0\n\ | |
600 | 603 | \n\ |
601 | 604 | FileConfig=%s\n\ |
602 | 605 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -625,7 +628,7 @@ jobGenSamplesMerge = '\ |
625 | 628 | module load python/2.7.12\n\ |
626 | 629 | #module remove xerces/2.7\n\ |
627 | 630 | #module load xerces/2.8\n\ |
628 | -module load pygdal/2.1.0-py2.7\n\ | |
631 | +module load gcc/6.3.0\n\ | |
629 | 632 | \n\ |
630 | 633 | FileConfig=%s\n\ |
631 | 634 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -653,7 +656,7 @@ jobCmdSplitShape='\ |
653 | 656 | module load python/2.7.12\n\ |
654 | 657 | #module remove xerces/2.7\n\ |
655 | 658 | #module load xerces/2.8\n\ |
656 | -module load pygdal/2.1.0-py2.7\n\ | |
659 | +module load gcc/6.3.0\n\ | |
657 | 660 | \n\ |
658 | 661 | FileConfig=%s\n\ |
659 | 662 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -679,7 +682,7 @@ jobGenJobSplitShape='\ |
679 | 682 | module load python/2.7.12\n\ |
680 | 683 | #module remove xerces/2.7\n\ |
681 | 684 | #module load xerces/2.8\n\ |
682 | -module load pygdal/2.1.0-py2.7\n\ | |
685 | +module load gcc/6.3.0\n\ | |
683 | 686 | \n\ |
684 | 687 | FileConfig=%s\n\ |
685 | 688 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -708,7 +711,7 @@ jobRearrange='\ |
708 | 711 | module load python/2.7.12\n\ |
709 | 712 | #module remove xerces/2.7\n\ |
710 | 713 | #module load xerces/2.8\n\ |
711 | -module load pygdal/2.1.0-py2.7\n\ | |
714 | +module load gcc/6.3.0\n\ | |
712 | 715 | \n\ |
713 | 716 | FileConfig=%s\n\ |
714 | 717 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -737,7 +740,7 @@ jobGenCmdStat='\ |
737 | 740 | module load python/2.7.12\n\ |
738 | 741 | #module remove xerces/2.7\n\ |
739 | 742 | #module load xerces/2.8\n\ |
740 | -module load pygdal/2.1.0-py2.7\n\ | |
743 | +module load gcc/6.3.0\n\ | |
741 | 744 | \n\ |
742 | 745 | FileConfig=%s\n\ |
743 | 746 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -765,7 +768,7 @@ jobGenJobLaunchFusion='\ |
765 | 768 | module load python/2.7.12\n\ |
766 | 769 | #module remove xerces/2.7\n\ |
767 | 770 | #module load xerces/2.8\n\ |
768 | -module load pygdal/2.1.0-py2.7\n\ | |
771 | +module load gcc/6.3.0\n\ | |
769 | 772 | \n\ |
770 | 773 | FileConfig=%s\n\ |
771 | 774 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -794,7 +797,7 @@ jobGenJobLaunchStat='\ |
794 | 797 | module load python/2.7.12\n\ |
795 | 798 | #module remove xerces/2.7\n\ |
796 | 799 | #module load xerces/2.8\n\ |
797 | -module load pygdal/2.1.0-py2.7\n\ | |
800 | +module load gcc/6.3.0\n\ | |
798 | 801 | \n\ |
799 | 802 | FileConfig=%s\n\ |
800 | 803 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -823,7 +826,7 @@ jobGenCmdTrain='\ |
823 | 826 | module load python/2.7.12\n\ |
824 | 827 | #module remove xerces/2.7\n\ |
825 | 828 | #module load xerces/2.8\n\ |
826 | -module load pygdal/2.1.0-py2.7\n\ | |
829 | +module load gcc/6.3.0\n\ | |
827 | 830 | \n\ |
828 | 831 | FileConfig=%s\n\ |
829 | 832 | #export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -855,7 +858,7 @@ jobGenJobLaunchTrain='\ |
855 | 858 | module load python/2.7.12\n\ |
856 | 859 | #module remove xerces/2.7\n\ |
857 | 860 | #module load xerces/2.8\n\ |
858 | -module load pygdal/2.1.0-py2.7\n\ | |
861 | +module load gcc/6.3.0\n\ | |
859 | 862 | \n\ |
860 | 863 | FileConfig=%s\n\ |
861 | 864 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -884,7 +887,7 @@ jobGenCmdClass='\ |
884 | 887 | module load python/2.7.12\n\ |
885 | 888 | #module remove xerces/2.7\n\ |
886 | 889 | #module load xerces/2.8\n\ |
887 | -module load pygdal/2.1.0-py2.7\n\ | |
890 | +module load gcc/6.3.0\n\ | |
888 | 891 | \n\ |
889 | 892 | FileConfig=%s\n\ |
890 | 893 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -924,7 +927,7 @@ jobGenJobLaunchClass='\ |
924 | 927 | module load python/2.7.12\n\ |
925 | 928 | #module remove xerces/2.7\n\ |
926 | 929 | #module load xerces/2.8\n\ |
927 | -module load pygdal/2.1.0-py2.7\n\ | |
930 | +module load gcc/6.3.0\n\ | |
928 | 931 | \n\ |
929 | 932 | FileConfig=%s\n\ |
930 | 933 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -953,7 +956,7 @@ jobCmdFusion='\ |
953 | 956 | module load python/2.7.12\n\ |
954 | 957 | #module remove xerces/2.7\n\ |
955 | 958 | #module load xerces/2.8\n\ |
956 | -module load pygdal/2.1.0-py2.7\n\ | |
959 | +module load gcc/6.3.0\n\ | |
957 | 960 | \n\ |
958 | 961 | FileConfig=%s\n\ |
959 | 962 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -989,7 +992,7 @@ jobGenJobNoData='\ |
989 | 992 | module load python/2.7.12\n\ |
990 | 993 | #module remove xerces/2.7\n\ |
991 | 994 | #module load xerces/2.8\n\ |
992 | -module load pygdal/2.1.0-py2.7\n\ | |
995 | +module load gcc/6.3.0\n\ | |
993 | 996 | \n\ |
994 | 997 | FileConfig=%s\n\ |
995 | 998 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -1018,7 +1021,7 @@ jobClassifShaping='\ |
1018 | 1021 | module load python/2.7.12\n\ |
1019 | 1022 | #module remove xerces/2.7\n\ |
1020 | 1023 | #module load xerces/2.8\n\ |
1021 | -module load pygdal/2.1.0-py2.7\n\ | |
1024 | +module load gcc/6.3.0\n\ | |
1022 | 1025 | \n\ |
1023 | 1026 | FileConfig=%s\n\ |
1024 | 1027 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -1049,7 +1052,7 @@ jobGenCmdConf='\ |
1049 | 1052 | module load python/2.7.12\n\ |
1050 | 1053 | #module remove xerces/2.7\n\ |
1051 | 1054 | #module load xerces/2.8\n\ |
1052 | -module load pygdal/2.1.0-py2.7\n\ | |
1055 | +module load gcc/6.3.0\n\ | |
1053 | 1056 | \n\ |
1054 | 1057 | FileConfig=%s\n\ |
1055 | 1058 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -1079,7 +1082,7 @@ jobGenJobLaunchConfusion='\ |
1079 | 1082 | module load python/2.7.12\n\ |
1080 | 1083 | #module remove xerces/2.7\n\ |
1081 | 1084 | #module load xerces/2.8\n\ |
1082 | -module load pygdal/2.1.0-py2.7\n\ | |
1085 | +module load gcc/6.3.0\n\ | |
1083 | 1086 | \n\ |
1084 | 1087 | FileConfig=%s\n\ |
1085 | 1088 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -1108,7 +1111,7 @@ jobfusionConfusion='\ |
1108 | 1111 | module load python/2.7.12\n\ |
1109 | 1112 | #module remove xerces/2.7\n\ |
1110 | 1113 | #module load xerces/2.8\n\ |
1111 | -module load pygdal/2.1.0-py2.7\n\ | |
1114 | +module load gcc/6.3.0\n\ | |
1112 | 1115 | \n\ |
1113 | 1116 | FileConfig=%s\n\ |
1114 | 1117 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -1137,7 +1140,7 @@ jobGenResults='\ |
1137 | 1140 | module load python/2.7.12\n\ |
1138 | 1141 | #module remove xerces/2.7\n\ |
1139 | 1142 | #module load xerces/2.8\n\ |
1140 | -module load pygdal/2.1.0-py2.7\n\ | |
1143 | +module load gcc/6.3.0\n\ | |
1141 | 1144 | \n\ |
1142 | 1145 | FileConfig=%s\n\ |
1143 | 1146 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -1164,7 +1167,7 @@ GenJobLaunchOutStat='\ |
1164 | 1167 | module load python/2.7.12\n\ |
1165 | 1168 | #module remove xerces/2.7\n\ |
1166 | 1169 | #module load xerces/2.8\n\ |
1167 | -module load pygdal/2.1.0-py2.7\n\ | |
1170 | +module load gcc/6.3.0\n\ | |
1168 | 1171 | \n\ |
1169 | 1172 | FileConfig=%s\n\ |
1170 | 1173 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -1192,7 +1195,7 @@ jobMergeOutStat='\ |
1192 | 1195 | module load python/2.7.12\n\ |
1193 | 1196 | #module remove xerces/2.7\n\ |
1194 | 1197 | #module load xerces/2.8\n\ |
1195 | -module load pygdal/2.1.0-py2.7\n\ | |
1198 | +module load gcc/6.3.0\n\ | |
1196 | 1199 | \n\ |
1197 | 1200 | FileConfig=%s\n\ |
1198 | 1201 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -1216,7 +1219,7 @@ jobMergeCorrStats='\ |
1216 | 1219 | #PBS -e %s/mergeCorrStats_err.log\n\ |
1217 | 1220 | \n\ |
1218 | 1221 | module load python/2.7.12\n\ |
1219 | -module load pygdal/2.1.0-py2.7\n\ | |
1222 | +module load gcc/6.3.0\n\ | |
1220 | 1223 | \n\ |
1221 | 1224 | FileConfig=%s\n\ |
1222 | 1225 | export ITK_AUTOLOAD_PATH=""\n\ |
... | ... | @@ -1230,3 +1233,28 @@ CONFIG=$FileConfig\n\ |
1230 | 1233 | python computeStats.py -wd $TMPDIR -conf $CONFIG\n\ |
1231 | 1234 | \n\ |
1232 | 1235 | ' |
1236 | + | |
1237 | +jobFormattingVectors='\ | |
1238 | +#!/bin/bash\n\ | |
1239 | +#PBS -N formattingV\n\ | |
1240 | +#PBS -l select=1:ncpus=2:mem=10gb\n\ | |
1241 | +#PBS -l walltime=30:00:00\n\ | |
1242 | +#PBS -o %s/formattingVectors_out.log\n\ | |
1243 | +#PBS -e %s/formattingVectors_err.log\n\ | |
1244 | +\n\ | |
1245 | +module load python/2.7.12\n\ | |
1246 | +module load gcc/6.3.0\n\ | |
1247 | +#module load gcc/6.3.0\n\ | |
1248 | +\n\ | |
1249 | +FileConfig=%s\n\ | |
1250 | +export ITK_AUTOLOAD_PATH=""\n\ | |
1251 | +export OTB_HOME=$(grep --only-matching --perl-regex "^((?!#).)*(?<=OTB_HOME\:).*" $FileConfig | cut -d "\'" -f 2)\n\ | |
1252 | +. $OTB_HOME/config_otb.sh\n\ | |
1253 | +\n\ | |
1254 | +PYPATH=$(grep --only-matching --perl-regex "^((?!#).)*(?<=pyAppPath\:).*" $FileConfig | cut -d "\'" -f 2)\n\ | |
1255 | +cd $PYPATH\n\ | |
1256 | +CONFIG=$FileConfig\n\ | |
1257 | +\n\ | |
1258 | +python formatting_vectors.py -wD $TMPDIR -conf $CONFIG\n\ | |
1259 | +\n\ | |
1260 | +' | ... | ... |
scripts/common/fileUtils.py
... | ... | @@ -211,12 +211,27 @@ def dateInterval(dateMin,dataMax,tr): |
211 | 211 | curr += delta |
212 | 212 | |
213 | 213 | def updatePyPath(): |
214 | - moduleDirectoryName = ["SAR"] | |
214 | + """ | |
215 | + usage : add some child/parent directories to PYTHONPATH needed in IOTA2
216 | + warning : this script depends on the IOTA2 architecture
217 | + | |
218 | + TODO : | |
219 | + transform IOTA2 project as python module arch | |
220 | + """ | |
221 | + #child directories | |
222 | + moduleDirectoryName = ["SAR", "MPI"] | |
215 | 223 | currentDirectory = os.path.dirname(os.path.realpath(__file__)) |
216 | - for currentModule in moduleDirectoryName : | |
217 | - modPath = currentDirectory+"/"+currentModule | |
224 | + for currentModule in moduleDirectoryName: | |
225 | + modPath = currentDirectory + "/" + currentModule | |
218 | 226 | if not modPath in sys.path: |
219 | 227 | sys.path.append(modPath) |
228 | + #parent directories | |
229 | + ext_mod = ["vector-tools"] | |
230 | + parent = "/".join(os.path.abspath(os.path.join(os.path.realpath(__file__), os.pardir)).split("/")[0:-1]) | |
231 | + for currentModule in ext_mod: | |
232 | + ext_mod_path = os.path.join(parent, currentModule) | |
233 | + if not ext_mod_path in sys.path: | |
234 | + sys.path.append(ext_mod_path) | |
220 | 235 | |
221 | 236 | def updateDirectory(src, dst): |
222 | 237 | |
... | ... | @@ -1324,7 +1339,7 @@ def getListTileFromModel(modelIN,pathToConfig): |
1324 | 1339 | return model.tilesList.split("_") |
1325 | 1340 | |
1326 | 1341 | def fileSearchRegEx(Pathfile): |
1327 | - return [f for f in glob.glob(Pathfile)] | |
1342 | + return [f for f in glob.glob(Pathfile.replace("[","[[]"))] | |
1328 | 1343 | |
1329 | 1344 | def getShapeExtent(shape_in): |
1330 | 1345 | """ | ... | ... |
... | ... | @@ -0,0 +1,162 @@ |
1 | +#!/usr/bin/python | |
2 | +#-*- coding: utf-8 -*- | |
3 | + | |
4 | +# ========================================================================= | |
5 | +# Program: iota2 | |
6 | +# | |
7 | +# Copyright (c) CESBIO. All rights reserved. | |
8 | +# | |
9 | +# See LICENSE for details. | |
10 | +# | |
11 | +# This software is distributed WITHOUT ANY WARRANTY; without even | |
12 | +# the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR | |
13 | +# PURPOSE. See the above copyright notices for more information. | |
14 | +# | |
15 | +# ========================================================================= | |
16 | + | |
17 | +import argparse | |
18 | +import fileUtils as fut | |
19 | +import serviceConfigFile as SCF | |
20 | +import shutil | |
21 | +import os | |
22 | +from Utils import run | |
23 | +fut.updatePyPath() | |
24 | + | |
25 | +from AddField import addField | |
26 | + | |
27 | + | |
28 | +def get_regions(vec_name): | |
29 | + """ | |
30 | + """ | |
31 | + regions = [] | |
32 | + for elem in range(2, len(vec_name.split("_"))): | |
33 | + if vec_name.split("_")[elem] == "seed": | |
34 | + break | |
35 | + else: | |
36 | + regions.append(vec_name.split("_")[elem]) | |
37 | + return regions | |
38 | + | |
39 | + | |
40 | +def split_vector_by_region(in_vect, output_dir, region_field, driver="ESRI shapefile", | |
41 | + proj_in="EPSG:2154", proj_out="EPSG:2154"): | |
42 | + """ | |
43 | + usage : split a vector considering a field value | |
44 | + | |
45 | + IN | |
46 | + in_vect [string] : input vector path | |
47 | + output_dir [string] : path to output directory | |
48 | + region_field [string] | |
49 | + driver [string] | |
50 | + proj_in [string] | |
51 | + proj_out [string] | |
52 | + OUT | |
53 | + output_paths [list of strings] : paths to new output vectors | |
54 | + """ | |
55 | + | |
56 | + output_paths = [] | |
57 | + | |
58 | + #const | |
59 | + tile_pos = 0 | |
60 | + seed_pos = -2 | |
61 | + | |
62 | + vec_name = os.path.split(in_vect)[-1] | |
63 | + tile = vec_name.split("_")[tile_pos] | |
64 | + seed = vec_name.split("_")[seed_pos].split(".")[0] | |
65 | + extent = os.path.splitext(vec_name)[-1] | |
66 | + | |
67 | + regions = get_regions(vec_name) | |
68 | + | |
69 | + table = vec_name.split(".")[0] | |
70 | + if driver != "ESRI shapefile": | |
71 | + table = "output" | |
72 | + #split vector | |
73 | + for region in regions: | |
74 | + out_vec_name = "_".join([tile, "region", region, "seed" + seed, "Samples"]) | |
75 | + output_vec = os.path.join(output_dir, out_vec_name + extent) | |
76 | + output_paths.append(output_vec) | |
77 | + sql_cmd = "select * FROM " + table + " WHERE " + region_field + "='" + region + "'" | |
78 | + cmd = 'ogr2ogr -t_srs ' + proj_out + ' -s_srs ' + proj_in + ' -nln ' + table + ' -f "' + driver + '" -sql "' + sql_cmd + '" ' + output_vec + ' ' + in_vect | |
79 | + run(cmd) | |
80 | + | |
81 | + return output_paths | |
82 | + | |
83 | + | |
84 | +def merge_vectors(data_app_val_dir, output_dir, region_field, runs, tile): | |
85 | + """ | |
86 | + usage : for each vector in the tile, add a region field and concatenate them
87 | + | |
88 | + IN | |
89 | + data_app_val_dir [string] : path to the folder containing vectors | |
90 | + output_dir [string] : path to output directory