Ortho and backscattering: add a DEM clipping module so an oversized DEM no longer drags down efficiency; rework the vegetation phenology feature-screening module
parent 5cb2f17d4f
commit f3b421eb2c
@@ -0,0 +1 @@
+/dem-L-SAR/run_log/
@@ -12,6 +12,7 @@ import logging
 from tool.algorithm.block.blockprocess import BlockProcess
 from tool.algorithm.image.ImageHandle import ImageHandler
+from tool.algorithm.xml.AnalysisXml import DictXml
 from tool.algorithm.xml.CreateMetaDict import CreateMetaDict, CreateProductXml
 from tool.algorithm.algtools.PreProcess import PreProcess as pp
 import tarfile
@@ -523,11 +524,17 @@ class OrthoMain:
        pass

    def cut_dem(self, dem_merged_path, meta_file_path):
        left_up_lon = 0
        left_up_lat = 0

        _, scopes = DictXml(meta_file_path).get_extend()
        intersect_polygon = pp().intersect_polygon(scopes)
        if intersect_polygon is None:
            raise Exception('cal intersect box fail!')
        shp_path = os.path.join(self.__workspace_Temporary_path, 'IntersectPolygon.shp')
        if pp().write_polygon_shp(shp_path, intersect_polygon, 4326) is False:
            raise Exception('create intersect shp fail!')
        dem_process = os.path.join(self.__workspace_Temporary_path, 'dem_cut.tif')
        pp().cut_img(dem_process, dem_merged_path, shp_path)
        return dem_process

    def process_sim_ori(self, ori_sim, sim_ori):
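The clipping step above is the heart of this commit: intersect the scene footprints read from the metadata, write the intersection polygon to an EPSG:4326 shapefile, and crop the merged DEM to it so the downstream orthorectification only reads the pixels it needs. A minimal sketch of the crop itself, assuming `pp().cut_img` is a thin wrapper over `gdal.Warp` with a cutline (the helper name `clip_dem_to_footprint` is illustrative, not part of the repo):

```python
from osgeo import gdal

def clip_dem_to_footprint(dem_path, cutline_shp, out_path):
    # Crop the merged DEM to the footprint polygon; cropToCutline shrinks
    # the raster extent rather than just masking pixels outside the polygon.
    ds = gdal.Warp(out_path, dem_path,
                   cutlineDSName=cutline_shp,
                   cropToCutline=True,
                   dstNodata=-9999)
    ds = None  # close the dataset so GDAL flushes it to disk
    return out_path
```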
@@ -42,7 +42,7 @@
 <ParaType>File</ParaType>
 <DataType>tar.gz</DataType>
 <ParaSource>Cal</ParaSource>
-<ParaValue>E:\MicroWorkspace\LT1B\LT230919\LT1B_MONO_MYC_STRIP4_005860_E130.9_N47.7_20230327_SLC_AHV_L1A_0000086966.tar.gz</ParaValue>
+<ParaValue>F:\MicroWorkspace\LT1B\LT230919\LT1B_MONO_MYC_STRIP4_005860_E130.9_N47.7_20230327_SLC_AHV_L1A_0000086966.tar.gz</ParaValue>
 <EnModification>True</EnModification>
 <EnMultipleChoice>False</EnMultipleChoice>
 <Control>File</Control>
@@ -57,7 +57,7 @@
 <ParaType>File</ParaType>
 <DataType>tif</DataType>
 <ParaSource>Cal</ParaSource>
-<ParaValue>E:\MicroWorkspace\LT1B\LT230919\DEM30m.tif</ParaValue>
+<ParaValue>F:\MicroWorkspace\LT1B\LT230919\DEM30m.tif</ParaValue>
 <EnModification>True</EnModification>
 <EnMultipleChoice>True</EnMultipleChoice>
 <Control>File</Control>
@@ -74,7 +74,7 @@
 <ParaType>File</ParaType>
 <DataType>tar.gz</DataType>
 <ParaSource>Cal</ParaSource>
-<ParaValue>D:\micro\LWork\BackScattering\Output\LT1B_MONO_MYC_STRIP4_005860_E130.9_N47.7_20230327_SLC_AHV_L1A_0000086966-cal.tar.gz</ParaValue>
+<ParaValue>D:\micro\LWork\BackScattering\Output\LT1B_MONO_MYC_STRIP4_005860_E130.9_N47.7_20230327_SLC_AHV_L1A_0000086966-CAL.tar.gz</ParaValue>
 <MaxValue>DEFAULT</MaxValue>
 <MinValue>DEFAULT</MinValue>
 <OptionValue>DEFAULT</OptionValue>
@@ -11,6 +11,7 @@
 import logging
 from tool.algorithm.algtools.logHandler import LogHandler
 from tool.algorithm.xml.AlgXmlHandle import ManageAlgXML, CheckSource
+from tool.algorithm.xml.AnalysisXml import DictXml
 from tool.algorithm.xml.CreateMetaDict import CreateMetaDict, CreateProductXml
 from tool.algorithm.image.ImageHandle import ImageHandler
 from tool.algorithm.algtools.PreProcess import PreProcess as pp

@@ -24,7 +25,6 @@ import shutil
 import tarfile
 import sys

 if cf.get('debug') == 'True':
     DEBUG = True
 else:
@@ -249,6 +249,18 @@ class ScatteringMain:
         if os.path.exists(path):
             self.del_floder(path)

+    def cut_dem(self, dem_merged_path, meta_file_path):
+        _, scopes = DictXml(meta_file_path).get_extend()
+        intersect_polygon = pp().intersect_polygon(scopes)
+        if intersect_polygon is None:
+            raise Exception('cal intersect box fail!')
+        shp_path = os.path.join(self.__workspace_preprocessing_path, 'IntersectPolygon.shp')
+        if pp().write_polygon_shp(shp_path, intersect_polygon, 4326) is False:
+            raise Exception('create intersect shp fail!')
+        dem_process = os.path.join(self.__workspace_preprocessing_path, 'dem_cut.tif')
+        pp().cut_img(dem_process, dem_merged_path, shp_path)
+        return dem_process
+
     def process_sim_ori(self, ori_sim, sim_ori):

         scopes = ()
@@ -303,6 +315,7 @@ class ScatteringMain:
         out_dem_path = self.__workspace_preprocessing_path
         dem_merged_path = DEMProcess.dem_merged(in_dem_path, meta_file_path, out_dem_path)  # generates TestDEM\mergedDEM_VRT.tif

+        dem_path = self.cut_dem(dem_merged_path, meta_file_path)
         in_slc_path = None
         for slc_path in in_tif_paths:
             if slc_path.find(".tiff") > 0 and (slc_path.find("_HH_") > 0 or slc_path.find("_VV_") > 0):
@@ -310,7 +323,7 @@ class ScatteringMain:
                 break

         # after the correction model has been obtained
-        Orthorectification.preCaldem_sar_rc(dem_merged_path, in_slc_path, self.__workspace_preprocessing_path, self.__workspace_processing_path.replace("\\", "\\\\"))  # coarse screening of the coordinate range
+        Orthorectification.preCaldem_sar_rc(dem_path, in_slc_path, self.__workspace_preprocessing_path, self.__workspace_processing_path.replace("\\", "\\\\"))  # coarse screening of the coordinate range
         logger.info('progress bar: 40%')
         # clip_dem_reample_path = os.path.join(self.__workspace_preprocessing_path, "SAR_dem.tiff")
         # infooption = gdal.InfoOptions("-json")
@@ -465,7 +478,7 @@ if __name__ == '__main__':
     start = datetime.datetime.now()
     try:
         if len(sys.argv) < 2:
-            xml_path = 'BackScattering.xml'
+            xml_path = 'BackScattering-L-SAR.xml'
         else:
             xml_path = sys.argv[1]
         ScatteringMain = ScatteringMain(xml_path)
@@ -1,19 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-L-SAR\backScattering
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:mylog:progress bar: 30%
ERROR:mylog:run-time error!
Traceback (most recent call last):
  File "D:/estar-proj/microproduct-L-SAR/backScattering/BackScatteringMain.py", line 471, in <module>
    if not ScatteringMain.process_handle(start):
  File "D:/estar-proj/microproduct-L-SAR/backScattering/BackScatteringMain.py", line 292, in process_handle
    Orthorectification.IndirectOrthorectification(self.__in_processing_paras["SLC"], self.__workspace_processing_path)  # change 1
  File "D:\estar-proj\microproduct-L-SAR\backScattering\BackScatteringAlg.py", line 1520, in IndirectOrthorectification
    self.header_info = self.ParseHearderFile(os.path.join(FilePath_str, header_name))
  File "D:\estar-proj\microproduct-L-SAR\backScattering\BackScatteringAlg.py", line 1094, in ParseHearderFile
    GPSPoints = FindInfomationFromJson(HeaderFile_dom_json, GPSNode_Path)
  File "D:\estar-proj\microproduct-L-SAR\backScattering\BackScatteringAlg.py", line 131, in FindInfomationFromJson
    result_node = result_node[nodename]
KeyError: 'product'
INFO:mylog:running use time: 0:00:17.072831
@@ -1,30 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-L-SAR\backScattering
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:mylog:progress bar: 30%
INFO:mylog:progress bar: 40%
WARNING:mylog:LTMetaData.get_QualifyValue() error!
ERROR:mylog:run-time error!
Traceback (most recent call last):
  File "D:\estar-proj\microproduct-L-SAR\tool\algorithm\algtools\MetaDataHandler.py", line 85, in get_QualifyValue
    QualifyValue = OrthoMetaData.get_QualifyValue(meta_file_path, polarization)
  File "D:\estar-proj\microproduct-L-SAR\tool\algorithm\algtools\MetaDataHandler.py", line 47, in get_QualifyValue
    QualifyValue = float(root.find('processing').find('processingParameter').find('quantifyValue').find(polarization).text)
AttributeError: 'NoneType' object has no attribute 'find'

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "D:/estar-proj/microproduct-L-SAR/backScattering/BackScatteringMain.py", line 471, in <module>
    if not ScatteringMain.process_handle(start):
  File "D:/estar-proj/microproduct-L-SAR/backScattering/BackScatteringMain.py", line 371, in process_handle
    alg.sar_backscattering_coef(in_tif_path, meta_file_path, out_tif_path)
  File "D:\estar-proj\microproduct-L-SAR\backScattering\BackScatteringAlg.py", line 93, in sar_backscattering_coef
    QualifyValue = MetaDataHandler.get_QualifyValue(meta_file_path, polarization)
  File "D:\estar-proj\microproduct-L-SAR\tool\algorithm\algtools\MetaDataHandler.py", line 88, in get_QualifyValue
    QualifyValue = GF3L1AMetaData.get_QualifyValue(meta_file_path, polarization)
  File "D:\estar-proj\microproduct-L-SAR\tool\algorithm\algtools\MetaDataHandler.py", line 27, in get_QualifyValue
    QualifyValue = float(root.find('imageinfo').find('QualifyValue').find(polarization).text)
AttributeError: 'NoneType' object has no attribute 'find'
INFO:mylog:running use time: 0:00:26.594524
@@ -1,30 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-L-SAR\backScattering
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:mylog:progress bar: 30%
INFO:mylog:progress bar: 40%
WARNING:mylog:LTMetaData.get_QualifyValue() error!
ERROR:mylog:run-time error!
Traceback (most recent call last):
  File "D:\estar-proj\microproduct-L-SAR\tool\algorithm\algtools\MetaDataHandler.py", line 85, in get_QualifyValue
    QualifyValue = OrthoMetaData.get_QualifyValue(meta_file_path, polarization)
  File "D:\estar-proj\microproduct-L-SAR\tool\algorithm\algtools\MetaDataHandler.py", line 47, in get_QualifyValue
    QualifyValue = float(root.find('processing').find('processingParameter').find('quantifyValue').find(polarization).text)
AttributeError: 'NoneType' object has no attribute 'find'

During handling of the above exception, another exception occurred:

Traceback (most recent call last):
  File "D:/estar-proj/microproduct-L-SAR/backScattering/BackScatteringMain.py", line 471, in <module>
    if not ScatteringMain.process_handle(start):
  File "D:/estar-proj/microproduct-L-SAR/backScattering/BackScatteringMain.py", line 371, in process_handle
    alg.sar_backscattering_coef(in_tif_path, meta_file_path, out_tif_path)
  File "D:\estar-proj\microproduct-L-SAR\backScattering\BackScatteringAlg.py", line 93, in sar_backscattering_coef
    QualifyValue = MetaDataHandler.get_QualifyValue(meta_file_path, polarization)
  File "D:\estar-proj\microproduct-L-SAR\tool\algorithm\algtools\MetaDataHandler.py", line 88, in get_QualifyValue
    QualifyValue = GF3L1AMetaData.get_QualifyValue(meta_file_path, polarization)
  File "D:\estar-proj\microproduct-L-SAR\tool\algorithm\algtools\MetaDataHandler.py", line 27, in get_QualifyValue
    QualifyValue = float(root.find('imageinfo').find('QualifyValue').find(polarization).text)
AttributeError: 'NoneType' object has no attribute 'find'
INFO:mylog:running use time: 0:08:05.774058
@@ -1,5 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-L-SAR\backScattering
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:mylog:progress bar: 30%
@@ -1,17 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-L-SAR\backScattering
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:mylog:progress bar: 30%
INFO:mylog:progress bar: 40%
INFO:mylog:progress bar: 90%
ERROR:mylog:run-time error!
Traceback (most recent call last):
  File "D:/estar-proj/microproduct-L-SAR/backScattering/BackScatteringMain.py", line 471, in <module>
    if not ScatteringMain.process_handle(start):
  File "D:/estar-proj/microproduct-L-SAR/backScattering/BackScatteringMain.py", line 437, in process_handle
    para_dict = CreateMetaDict(image_path, self.__in_processing_paras['META'], self.__workspace_processing_path,
  File "D:\estar-proj\microproduct-L-SAR\tool\algorithm\xml\CreateMetaDict.py", line 58, in calu_nature
    imageinfo_widthspace = self.ImageHandler.get_geotransform(self.out_path1)[1]  # resolution after projection
TypeError: 'NoneType' object is not subscriptable
INFO:mylog:running use time: 0:10:04.334320
@@ -1,7 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-L-SAR\backScattering
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:mylog:progress bar: 30%
INFO:mylog:progress bar: 40%
INFO:mylog:progress bar: 90%
@@ -1,7 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-L-SAR\backScattering
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:mylog:progress bar: 30%
INFO:mylog:progress bar: 40%
INFO:mylog:progress bar: 90%
@@ -1,11 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-L-SAR\backScattering
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:mylog:progress bar: 30%
INFO:mylog:progress bar: 40%
INFO:mylog:progress bar: 90%
INFO:mylog:process_handle finished!
INFO:mylog:progress bar: 100%
INFO:mylog:successful production of backscattering products!
INFO:mylog:running use time: 0:12:32.600524
@@ -1,11 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-L-SAR\backScattering
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:mylog:progress bar: 30%
INFO:mylog:progress bar: 40%
INFO:mylog:progress bar: 90%
INFO:mylog:process_handle finished!
INFO:mylog:progress bar: 100%
INFO:mylog:successful production of backscattering products!
INFO:mylog:running use time: 0:12:19.438164
@@ -38,7 +38,7 @@
 <ParaType>File</ParaType>
 <DataType>tar.gz</DataType>
 <ParaSource>Man</ParaSource>
-<ParaValue>E:\MicroWorkspace\LT1B\LT230919\LT1B_MONO_MYC_STRIP4_005860_E130.9_N47.7_20230327_SLC_AHV_L1A_0000086966-ortho.tar.gz</ParaValue>
+<ParaValue>F:\MicroWorkspace\LT1B\LT230919\LT1B_MONO_MYC_STRIP4_005860_E130.9_N47.7_20230327_SLC_AHV_L1A_0000086966-ortho.tar.gz</ParaValue>
 <EnModification>True</EnModification>
 <EnMultipleChoice>False</EnMultipleChoice>
 <Control>File</Control>
@@ -53,7 +53,7 @@
 <ParaType>File</ParaType>
 <DataType>csv</DataType>
 <ParaSource>Man</ParaSource>
-<ParaValue>E:\MicroWorkspace\LT1B\LT230919\LT1B_landaCoverSample.csv</ParaValue>
+<ParaValue>F:\MicroWorkspace\LT1B\LT230919\LT1B_landaCoverSample.csv</ParaValue>
 <EnModification>True</EnModification>
 <EnMultipleChoice>True</EnMultipleChoice>
 <Control>UploadInput</Control>
@@ -1,21 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-l-sar\landcover-L-SAR
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:root:scope0:[[130.58930555555554, 47.875416666666666], [131.11458333333331, 47.875416666666666], [130.58930555555554, 47.428472222222226], [131.11458333333331, 47.428472222222226]]
INFO:root:scope1:[[130.58930555555554, 47.875416666666666], [131.11458333333331, 47.875416666666666], [130.58930555555554, 47.428472222222226], [131.11458333333331, 47.428472222222226]]
INFO:root:scope roi :[(130.58930555555554, 47.428472222222226), (130.58930555555554, 47.875416666666666), (131.11458333333331, 47.875416666666666), (131.11458333333331, 47.428472222222226)]
INFO:mylog:cut sim_ori success!
INFO:mylog:preprocess_handle success!
INFO:mylog:1,water,num:2598827
INFO:mylog:max number =10000, random select10000 point as train data!
INFO:mylog:2,build,num:260469
INFO:mylog:max number =10000, random select10000 point as train data!
INFO:mylog:3,dryland,num:12290
INFO:mylog:max number =10000, random select10000 point as train data!
INFO:mylog:4,road,num:803810
INFO:mylog:max number =10000, random select10000 point as train data!
INFO:mylog:read csv data success!
INFO:mylog:progress bar: 20%
INFO:mylog:refine_lee filter success!
INFO:mylog:progress bar: 30%
@@ -1,44 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-l-sar\landcover-L-SAR
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
INFO:mylog:check_source success!
INFO:root:scope0:[[130.58930555555554, 47.875416666666666], [131.11458333333331, 47.875416666666666], [130.58930555555554, 47.428472222222226], [131.11458333333331, 47.428472222222226]]
INFO:root:scope1:[[130.58930555555554, 47.875416666666666], [131.11458333333331, 47.875416666666666], [130.58930555555554, 47.428472222222226], [131.11458333333331, 47.428472222222226]]
INFO:root:scope roi :[(130.58930555555554, 47.428472222222226), (130.58930555555554, 47.875416666666666), (131.11458333333331, 47.875416666666666), (131.11458333333331, 47.428472222222226)]
INFO:mylog:cut sim_ori success!
INFO:mylog:preprocess_handle success!
INFO:mylog:1,water,num:2598827
INFO:mylog:max number =10000, random select10000 point as train data!
INFO:mylog:2,build,num:260469
INFO:mylog:max number =10000, random select10000 point as train data!
INFO:mylog:3,dryland,num:12290
INFO:mylog:max number =10000, random select10000 point as train data!
INFO:mylog:4,road,num:803810
INFO:mylog:max number =10000, random select10000 point as train data!
INFO:mylog:read csv data success!
INFO:mylog:progress bar: 20%
INFO:mylog:refine_lee filter success!
INFO:mylog:progress bar: 30%
INFO:root:feature_tif_paths:{'Freeman_Dbl': 'D:\\micro\\LWork\\LandCover\\Temporary\\processing\\feature_tif\\Freeman_Dbl.tif', 'Freeman_Odd': 'D:\\micro\\LWork\\LandCover\\Temporary\\processing\\feature_tif\\Freeman_Odd.tif', 'Freeman_Vol': 'D:\\micro\\LWork\\LandCover\\Temporary\\processing\\feature_tif\\Freeman_Vol.tif'}
INFO:mylog:decompose feature success!
INFO:mylog:progress bar: 50%
INFO:mylog:feature_list:['0: Freeman_Dbl_geo.tif', '1: Freeman_Odd_geo.tif', '2: Freeman_Vol_geo.tif']
INFO:mylog:gene_train_set success!
INFO:mylog:importances:[0.0872527 0.40417686 0.50857044],threshold=0.07
INFO:mylog:optimal_feature:[2, 1, 0]
INFO:mylog:correlation_map:
 [[0. 1. 1.]
 [0. 0. 1.]
 [0. 0. 0.]]
INFO:mylog:validity_list_corr:[2]
INFO:mylog:[2]
INFO:mylog:train_feature:['2: Freeman_Vol_geo.tif']
INFO:mylog:RF trainning
INFO:mylog:RF train successful
INFO:mylog:progress bar: 60%
INFO:mylog:test_feature:dict_keys(['Freeman_Vol_geo'])
INFO:mylog:blocking tifs success!
INFO:mylog:create features matrix success!
INFO:mylog:testing
INFO:mylog:test success!
INFO:mylog:progress bar: 95%
@@ -1,30 +0,0 @@
INFO:mylog:total:195,block:0 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_0_1024.tif
INFO:mylog:total:195,block:1 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_10240_11264.tif
INFO:mylog:total:195,block:9 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_4096_5120.tif
INFO:mylog:total:195,block:10 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_5120_6144.tif
INFO:mylog:total:195,block:19 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_12288_13312.tif
INFO:mylog:total:195,block:20 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_13312_14336.tif
INFO:mylog:total:195,block:29 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_9216_10240.tif
INFO:mylog:total:195,block:30 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_0_1024.tif
INFO:mylog:total:195,block:40 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_5120_6144.tif
INFO:mylog:total:195,block:41 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_6144_7168.tif
INFO:mylog:total:195,block:52 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_2048_3072.tif
INFO:mylog:total:195,block:53 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_3072_4096.tif
INFO:mylog:total:195,block:64 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_12288_13312.tif
INFO:mylog:total:195,block:65 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_13312_14336.tif
INFO:mylog:total:195,block:77 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_1024_2048.tif
INFO:mylog:total:195,block:78 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_11264_12288.tif
INFO:mylog:total:195,block:92 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_1024_2048.tif
INFO:mylog:total:195,block:93 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_11264_12288.tif
INFO:mylog:total:195,block:107 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_1024_2048.tif
INFO:mylog:total:195,block:108 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_11264_12288.tif
INFO:mylog:total:195,block:122 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_1024_2048.tif
INFO:mylog:total:195,block:123 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_11264_12288.tif
INFO:mylog:total:195,block:137 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_1024_2048.tif
INFO:mylog:total:195,block:138 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_11264_12288.tif
INFO:mylog:total:195,block:152 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_1024_2048.tif
INFO:mylog:total:195,block:153 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_11264_12288.tif
INFO:mylog:total:195,block:167 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_1024_2048.tif
INFO:mylog:total:195,block:168 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_11264_12288.tif
INFO:mylog:total:195,block:182 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_1024_2048.tif
INFO:mylog:total:195,block:183 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_11264_12288.tif
@@ -1,15 +0,0 @@
INFO:mylog:total:195,block:2 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_1024_2048.tif
INFO:mylog:total:195,block:11 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_6144_7168.tif
INFO:mylog:total:195,block:21 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_13931_14955.tif
INFO:mylog:total:195,block:31 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_10240_11264.tif
INFO:mylog:total:195,block:42 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_7168_8192.tif
INFO:mylog:total:195,block:54 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_4096_5120.tif
INFO:mylog:total:195,block:67 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_2048_3072.tif
INFO:mylog:total:195,block:80 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_13312_14336.tif
INFO:mylog:total:195,block:95 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_13312_14336.tif
INFO:mylog:total:195,block:110 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_13312_14336.tif
INFO:mylog:total:195,block:125 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_13312_14336.tif
INFO:mylog:total:195,block:140 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_13312_14336.tif
INFO:mylog:total:195,block:155 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_13312_14336.tif
INFO:mylog:total:195,block:170 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_13312_14336.tif
INFO:mylog:total:195,block:185 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_13312_14336.tif
@@ -1,30 +0,0 @@
INFO:mylog:total:195,block:3 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_11264_12288.tif
INFO:mylog:total:195,block:4 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_12288_13312.tif
INFO:mylog:total:195,block:12 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_7168_8192.tif
INFO:mylog:total:195,block:13 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_8192_9216.tif
INFO:mylog:total:195,block:22 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_2048_3072.tif
INFO:mylog:total:195,block:23 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_3072_4096.tif
INFO:mylog:total:195,block:32 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_1024_2048.tif
INFO:mylog:total:195,block:33 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_11264_12288.tif
INFO:mylog:total:195,block:43 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_8192_9216.tif
INFO:mylog:total:195,block:44 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_9216_10240.tif
INFO:mylog:total:195,block:55 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_5120_6144.tif
INFO:mylog:total:195,block:56 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_6144_7168.tif
INFO:mylog:total:195,block:68 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_3072_4096.tif
INFO:mylog:total:195,block:69 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_4096_5120.tif
INFO:mylog:total:195,block:82 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_2048_3072.tif
INFO:mylog:total:195,block:83 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_3072_4096.tif
INFO:mylog:total:195,block:97 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_2048_3072.tif
INFO:mylog:total:195,block:98 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_3072_4096.tif
INFO:mylog:total:195,block:112 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_2048_3072.tif
INFO:mylog:total:195,block:113 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_3072_4096.tif
INFO:mylog:total:195,block:127 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_2048_3072.tif
INFO:mylog:total:195,block:128 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_3072_4096.tif
INFO:mylog:total:195,block:142 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_2048_3072.tif
INFO:mylog:total:195,block:143 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_3072_4096.tif
INFO:mylog:total:195,block:157 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_2048_3072.tif
INFO:mylog:total:195,block:158 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_3072_4096.tif
INFO:mylog:total:195,block:172 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_2048_3072.tif
INFO:mylog:total:195,block:173 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_3072_4096.tif
INFO:mylog:total:195,block:187 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_2048_3072.tif
INFO:mylog:total:195,block:188 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_3072_4096.tif
@@ -1,30 +0,0 @@
INFO:mylog:total:195,block:5 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_13312_14336.tif
INFO:mylog:total:195,block:6 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_13931_14955.tif
INFO:mylog:total:195,block:14 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_9216_10240.tif
INFO:mylog:total:195,block:15 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_0_1024.tif
INFO:mylog:total:195,block:24 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_4096_5120.tif
INFO:mylog:total:195,block:25 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_5120_6144.tif
INFO:mylog:total:195,block:35 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_13312_14336.tif
INFO:mylog:total:195,block:36 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_13931_14955.tif
INFO:mylog:total:195,block:46 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_10240_11264.tif
INFO:mylog:total:195,block:47 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_1024_2048.tif
INFO:mylog:total:195,block:58 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_8192_9216.tif
INFO:mylog:total:195,block:59 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_9216_10240.tif
INFO:mylog:total:195,block:71 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_6144_7168.tif
INFO:mylog:total:195,block:72 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_7168_8192.tif
INFO:mylog:total:195,block:85 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_5120_6144.tif
INFO:mylog:total:195,block:86 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_6144_7168.tif
INFO:mylog:total:195,block:100 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_5120_6144.tif
INFO:mylog:total:195,block:101 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_6144_7168.tif
INFO:mylog:total:195,block:115 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_5120_6144.tif
INFO:mylog:total:195,block:116 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_6144_7168.tif
INFO:mylog:total:195,block:130 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_5120_6144.tif
INFO:mylog:total:195,block:131 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_6144_7168.tif
INFO:mylog:total:195,block:145 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_5120_6144.tif
INFO:mylog:total:195,block:146 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_6144_7168.tif
INFO:mylog:total:195,block:160 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_5120_6144.tif
INFO:mylog:total:195,block:161 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_6144_7168.tif
INFO:mylog:total:195,block:175 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_5120_6144.tif
INFO:mylog:total:195,block:176 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_6144_7168.tif
INFO:mylog:total:195,block:190 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_5120_6144.tif
INFO:mylog:total:195,block:191 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_6144_7168.tif
@@ -1,15 +0,0 @@
INFO:mylog:total:195,block:7 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_2048_3072.tif
INFO:mylog:total:195,block:16 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_10240_11264.tif
INFO:mylog:total:195,block:26 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_6144_7168.tif
INFO:mylog:total:195,block:37 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_2048_3072.tif
INFO:mylog:total:195,block:49 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_12288_13312.tif
INFO:mylog:total:195,block:61 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_10240_11264.tif
INFO:mylog:total:195,block:74 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_9216_10240.tif
INFO:mylog:total:195,block:88 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_8192_9216.tif
INFO:mylog:total:195,block:103 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_8192_9216.tif
INFO:mylog:total:195,block:118 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_8192_9216.tif
INFO:mylog:total:195,block:133 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_8192_9216.tif
INFO:mylog:total:195,block:148 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_8192_9216.tif
INFO:mylog:total:195,block:163 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_8192_9216.tif
INFO:mylog:total:195,block:178 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_8192_9216.tif
INFO:mylog:total:195,block:193 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_8192_9216.tif
@@ -1,14 +0,0 @@
INFO:mylog:total:195,block:8 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_0_1024_3072_4096.tif
INFO:mylog:total:195,block:18 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_11264_12288.tif
INFO:mylog:total:195,block:28 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_8192_9216.tif
INFO:mylog:total:195,block:39 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_4096_5120.tif
INFO:mylog:total:195,block:51 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_13931_14955.tif
INFO:mylog:total:195,block:63 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_11264_12288.tif
INFO:mylog:total:195,block:76 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_10240_11264.tif
INFO:mylog:total:195,block:91 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_10240_11264.tif
INFO:mylog:total:195,block:106 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_10240_11264.tif
INFO:mylog:total:195,block:121 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_10240_11264.tif
INFO:mylog:total:195,block:136 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_10240_11264.tif
INFO:mylog:total:195,block:151 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_10240_11264.tif
INFO:mylog:total:195,block:166 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_10240_11264.tif
INFO:mylog:total:195,block:181 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_10240_11264.tif
@@ -1,14 +0,0 @@
INFO:mylog:total:195,block:17 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_1024_2048.tif
INFO:mylog:total:195,block:27 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_10240_11264_7168_8192.tif
INFO:mylog:total:195,block:38 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_3072_4096.tif
INFO:mylog:total:195,block:50 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_13312_14336.tif
INFO:mylog:total:195,block:62 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_1024_2048.tif
INFO:mylog:total:195,block:75 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_0_1024.tif
INFO:mylog:total:195,block:89 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_9216_10240.tif
INFO:mylog:total:195,block:104 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_9216_10240.tif
INFO:mylog:total:195,block:119 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_9216_10240.tif
INFO:mylog:total:195,block:134 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_9216_10240.tif
INFO:mylog:total:195,block:149 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_9216_10240.tif
INFO:mylog:total:195,block:164 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_9216_10240.tif
INFO:mylog:total:195,block:179 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_9216_10240.tif
INFO:mylog:total:195,block:194 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_9216_10240.tif
@@ -1,23 +0,0 @@
INFO:mylog:total:195,block:34 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_1024_2048_12288_13312.tif
INFO:mylog:total:195,block:45 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_0_1024.tif
INFO:mylog:total:195,block:48 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_11264_12288.tif
INFO:mylog:total:195,block:57 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11264_12288_7168_8192.tif
INFO:mylog:total:195,block:60 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_0_1024.tif
INFO:mylog:total:195,block:70 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_5120_6144.tif
INFO:mylog:total:195,block:73 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_8192_9216.tif
INFO:mylog:total:195,block:84 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_4096_5120.tif
INFO:mylog:total:195,block:87 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_7168_8192.tif
INFO:mylog:total:195,block:99 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_4096_5120.tif
INFO:mylog:total:195,block:102 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_7168_8192.tif
INFO:mylog:total:195,block:114 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_4096_5120.tif
INFO:mylog:total:195,block:117 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_7168_8192.tif
INFO:mylog:total:195,block:129 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_4096_5120.tif
INFO:mylog:total:195,block:132 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_7168_8192.tif
INFO:mylog:total:195,block:144 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_4096_5120.tif
INFO:mylog:total:195,block:147 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_7168_8192.tif
INFO:mylog:total:195,block:159 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_4096_5120.tif
INFO:mylog:total:195,block:162 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_7168_8192.tif
INFO:mylog:total:195,block:174 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_4096_5120.tif
INFO:mylog:total:195,block:177 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_7168_8192.tif
INFO:mylog:total:195,block:189 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_4096_5120.tif
INFO:mylog:total:195,block:192 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_7168_8192.tif
@@ -1,9 +0,0 @@
INFO:mylog:total:195,block:66 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_11701_12725_13931_14955.tif
INFO:mylog:total:195,block:79 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_12288_13312.tif
INFO:mylog:total:195,block:94 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_12288_13312.tif
INFO:mylog:total:195,block:109 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_12288_13312.tif
INFO:mylog:total:195,block:124 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_12288_13312.tif
INFO:mylog:total:195,block:139 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_12288_13312.tif
INFO:mylog:total:195,block:154 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_12288_13312.tif
INFO:mylog:total:195,block:169 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_12288_13312.tif
INFO:mylog:total:195,block:184 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_12288_13312.tif
@@ -1,8 +0,0 @@
INFO:mylog:total:195,block:81 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_2048_3072_13931_14955.tif
INFO:mylog:total:195,block:96 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_13931_14955.tif
INFO:mylog:total:195,block:111 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_13931_14955.tif
INFO:mylog:total:195,block:126 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_13931_14955.tif
INFO:mylog:total:195,block:141 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_13931_14955.tif
INFO:mylog:total:195,block:156 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_13931_14955.tif
INFO:mylog:total:195,block:171 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_13931_14955.tif
INFO:mylog:total:195,block:186 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_13931_14955.tif
@@ -1,7 +0,0 @@
INFO:mylog:total:195,block:90 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_3072_4096_0_1024.tif
INFO:mylog:total:195,block:105 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_4096_5120_0_1024.tif
INFO:mylog:total:195,block:120 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_5120_6144_0_1024.tif
INFO:mylog:total:195,block:135 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_6144_7168_0_1024.tif
INFO:mylog:total:195,block:150 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_7168_8192_0_1024.tif
INFO:mylog:total:195,block:165 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_8192_9216_0_1024.tif
INFO:mylog:total:195,block:180 testing data !path:D:\micro\LWork\LandCover\Temporary\processing\LandCover\LandCover_9216_10240_0_1024.tif
@@ -1,3 +0,0 @@
INFO:mylog:sysdir: D:\estar-proj\microproduct-l-sar\landcover-L-SAR
INFO:mylog:init algXML succeed
INFO:mylog:create new workspace success!
Binary file not shown.
@@ -1,166 +0,0 @@
#
# model-computation library
#
import cython
cimport cython  # required for the Cython declarations below
import numpy as np
cimport numpy as np
from libc.math cimport pi
from scipy.optimize import leastsq
import random
import logging
logger = logging.getLogger("mylog")


def WMCModel(param_arr, sample_lai, sample_soil, sample_inc, sample_sigma):
    """ WMC model, extended with a normalized vegetation index

    Args:
        param_arr (np.ndarray): parameter array
        sample_lai (double): leaf area index
        sample_soil (double): soil moisture
        sample_inc (double): incidence angle (radians)
        sample_sigma (double): backscattering coefficient (linear)

    Returns:
        double: equation residual
    """
    # map the parameters to names so the model is easy to modify
    A, B, C, D, M, N = param_arr  # modify the model here
    V_lai = sample_lai
    # V_lai = E * sample_lai + F
    exp_gamma = np.exp(-2 * B * (V_lai * D + C) * (1 / np.cos(sample_inc)))
    sigma_soil = M * sample_soil + N
    sigma_veg = A * V_lai * np.cos(sample_inc)
    f_veg = 1
    result = sigma_veg * (1 - exp_gamma) + sigma_soil * exp_gamma - sample_sigma
    return result
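Reading the residual off `WMCModel`, the function being fit is the standard water-cloud form with V = LAI; written out:

```latex
\sigma^0 \;=\; \underbrace{A\,V\cos\theta\,\bigl(1-\gamma^2\bigr)}_{\text{vegetation}}
\;+\; \underbrace{(M\,m_s + N)\,\gamma^2}_{\text{soil}},
\qquad
\gamma^2 = \exp\!\left(-\,\frac{2B\,(D\,V + C)}{\cos\theta}\right)
```

where θ is the incidence angle and m_s the soil moisture; the function returns the residual between this prediction and the observed σ⁰, which is what `leastsq` drives toward zero below.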
def train_WMCmodel(lai_water_inc_sigma_list, params_X0, train_err_image_path, draw_flag=True):
    """ Train the model parameters

    Args:
        lai_water_inc_sigma_list (list): samples used to train the model
    """
    def f(X):
        eqs = []
        for lai_water_inc_sigma_item in lai_water_inc_sigma_list:
            sample_lai = lai_water_inc_sigma_item[4]
            sample_sigma = lai_water_inc_sigma_item[5]  # 5: csv_sigma, 8: tiff_sigma
            sample_soil = lai_water_inc_sigma_item[6]
            sample_inc = lai_water_inc_sigma_item[7]
            FVC = lai_water_inc_sigma_item[8]
            eqs.append(WMCModel(X, sample_lai, sample_soil, sample_inc, sample_sigma))
        return eqs

    X0 = params_X0  # initial values
    # logger.info(str(X0))
    h = leastsq(f, X0)
    # logger.info(h[0], h[1])
    err_f = f(h[0])
    x_arr = [lai_waiter_inc_sigma_item[4] for lai_waiter_inc_sigma_item in lai_water_inc_sigma_list]
    # sort the points by error size
    # logger.info("training set:\npoint order by error\ncount:{}\nindex\terror\tsample info".format(str(np.array(err_f).shape)))
    # for i in np.argsort(np.array(err_f)):
    #     logger.info('{}\t{}\t{}'.format(i, err_f[i], str(lai_water_inc_sigma_list[i])))
    # logger.info("\nend of error-ordered output\n")

    if draw_flag:
        # logger.info(err_f)
        # logger.info(np.where(np.abs(err_f) < 10))
        from matplotlib import pyplot as plt
        plt.scatter(x_arr, err_f)
        plt.title("equation-err")
        plt.savefig(train_err_image_path, dpi=600)
        plt.show()

    return h[0]
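So training is an ordinary unweighted least-squares problem over the six model constants:

```latex
\hat{A},\hat{B},\hat{C},\hat{D},\hat{M},\hat{N}
\;=\;\arg\min\;\sum_{i}\;\mathrm{WMCModel}\!\left(A,\dots,N;\ \mathrm{LAI}_i,\ m_{s,i},\ \theta_i,\ \sigma^0_i\right)^{2}
```

(scipy's `leastsq` runs Levenberg-Marquardt on the residual vector returned by `f`). The inverse problem, recovering LAI per sample with the parameters frozen, is what the next two functions do.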
def test_WMCModel(lai_waiter_inc_sigma_list, param_arr, lai_X0, test_err_image_path, draw_flag=True):
    """ Test the trained model

    Args:
        lai_waiter_inc_sigma_list (list): sample set used for testing
        param_arr (_type_): fitted parameters A, B, C, D, M, N
        lai_X0 (_type_): initial LAI value

    Returns:
        list: error list [sample_lai, err, predict]
    """
    err = []
    err_f = []
    x_arr = []
    err_lai = []
    for lai_waiter_inc_sigma_item in lai_waiter_inc_sigma_list:
        sample_time, sample_code, sample_lon, sample_lat, sample_lai, csv_sigma, sample_soil, sample_inc, sample_sigma = lai_waiter_inc_sigma_item
        def f(X):
            lai = X[0]
            eqs = [WMCModel(param_arr, lai, sample_soil, sample_inc, csv_sigma)]
            return eqs
        X0 = lai_X0
        h = leastsq(f, X0)
        temp_err = h[0] - sample_lai
        err_lai.append(temp_err[0])  # difference between predicted and sampled LAI
        err.append([sample_lai, temp_err[0], h[0][0], sample_code])
        err_f.append(f(h[0])[0])  # equation residual
        x_arr.append(sample_lai)

    # sort the points by error size
    # logger.info("test set:\npoint order by error\ncount:{}\nindex\terror\tresidual\tsample info".format(str(np.array(err_lai).shape)))
    # for i in np.argsort(np.array(err_lai)):
    #     logger.info('{}\t{}\t{}\t{}'.format(i, err_lai[i], err_f[i], str(lai_waiter_inc_sigma_list[i])))
    # logger.info("\nend of error-ordered output\n")

    if draw_flag:
        from matplotlib import pyplot as plt
        plt.scatter(x_arr, err_lai)
        plt.title("equation-err")
        plt.savefig(test_err_image_path, dpi=600)
        plt.show()
    return err


def processs_WMCModel(param_arr, lai_X0, sigma, inc_angle, soil_water):

    if sigma < 0:
        return np.nan
    def f(X):
        lai = X[0]
        eqs = [WMCModel(param_arr, lai, soil_water, inc_angle, sigma)]
        return eqs
    h = leastsq(f, [lai_X0])

    return h[0][0]
# Cython extension entry point
cpdef np.ndarray[double, ndim=2] process_tiff(np.ndarray[double, ndim=2] sigma_tiff,
                                              np.ndarray[double, ndim=2] inc_tiff,
                                              np.ndarray[double, ndim=2] soil_water_tiff,
                                              np.ndarray[double, ndim=1] param_arr,
                                              double lai_X0):

    cdef np.ndarray[double, ndim=2] result = sigma_tiff
    cdef int param_arr_length = param_arr.shape[0]
    cdef int height = sigma_tiff.shape[0]
    cdef int width = sigma_tiff.shape[1]
    cdef int i = 0
    cdef int j = 0
    cdef double temp = 0

    while i < height:
        j = 0
        while j < width:
            temp = processs_WMCModel(param_arr, lai_X0, sigma_tiff[i, j], inc_tiff[i, j], soil_water_tiff[i, j])
            temp = temp if temp < 10 and temp >= 0 else np.nan  # discard implausible LAI values
            result[i, j] = temp
            j = j + 1
        i = i + 1
    return result
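For context, a hedged sketch of how the compiled extension would be driven once built (the import name `LAIProcess` and all input values here are assumptions for illustration; the actual build recipe is the setup.py below):

```python
import numpy as np
import LAIProcess  # assumed import name of the built Cython extension

# Toy float64 inputs: linear backscatter, incidence angle (rad), soil moisture.
sigma = np.random.rand(4, 4)
inc = np.full((4, 4), 0.6)
soil = np.full((4, 4), 0.2)
params = np.array([0.1, 0.2, 0.3, 0.4, 0.5, 0.6])  # A, B, C, D, M, N from train_WMCmodel
lai_map = LAIProcess.process_tiff(sigma, inc, soil, params, 1.0)  # 1.0 = initial LAI guess
```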
File diff suppressed because it is too large
Binary file not shown.
Binary file not shown.
Binary file not shown.
@@ -1,45 +0,0 @@
from setuptools import setup
from setuptools.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy
from pathlib import Path
import shutil


class MyBuildExt(build_ext):
    def run(self):
        build_ext.run(self)

        build_dir = Path(self.build_lib)
        root_dir = Path(__file__).parent
        target_dir = build_dir if not self.inplace else root_dir

        self.copy_file(Path('./LAIProcess') / '__init__.py', root_dir, target_dir)
        # self.copy_file(Path('./pkg2') / '__init__.py', root_dir, target_dir)
        self.copy_file(Path('.') / '__init__.py', root_dir, target_dir)

    def copy_file(self, path, source_dir, destination_dir):
        if not (source_dir / path).exists():
            return
        shutil.copyfile(str(source_dir / path), str(destination_dir / path))


setup(
    name="MyModule",
    ext_modules=cythonize(
        [
            # Extension("pkg1.*", ["root/pkg1/*.py"]),
            Extension("pkg2.*", ["./LAIProcess.pyx"]),
            # Extension("1.*", ["root/*.py"])
        ],
        build_dir="build",
        compiler_directives=dict(
            always_allow_keywords=True
        )),
    cmdclass=dict(
        build_ext=MyBuildExt
    ),
    packages=[],
    include_dirs=[numpy.get_include()],
)

# command: python setup.py build_ext --inplace
@@ -1,117 +0,0 @@
# -*- encoding: utf-8 -*-
# code from https://blog.csdn.net/theonegis/article/details/54427906
from osgeo import gdal
from osgeo import osr
import numpy as np

def getSRSPair(dataset):
    """
    Get the projected and geographic reference systems of the given dataset
    :param dataset: GDAL dataset
    :return: projected reference system, geographic reference system
    """
    prosrs = osr.SpatialReference()
    prosrs.ImportFromWkt(dataset.GetProjection())
    geosrs = prosrs.CloneGeogCS()
    return prosrs, geosrs


def geo2lonlat(dataset, x, y):
    """
    Convert projected coordinates to lon/lat (the projection is taken from the dataset)
    :param dataset: GDAL dataset
    :param x: projected x coordinate
    :param y: projected y coordinate
    :return: lon/lat (lon, lat) corresponding to the projected (x, y)
    """
    prosrs, geosrs = getSRSPair(dataset)
    ct = osr.CoordinateTransformation(prosrs, geosrs)
    coords = ct.TransformPoint(x, y)
    return coords[:2]


def lonlat2geo(dataset, lon, lat):
    """
    Convert lon/lat to projected coordinates (the projection is taken from the dataset)
    :param dataset: GDAL dataset
    :param lon: geographic longitude
    :param lat: geographic latitude
    :return: projected coordinates corresponding to (lon, lat)
    """
    prosrs, geosrs = getSRSPair(dataset)
    ct = osr.CoordinateTransformation(geosrs, prosrs)
    coords = ct.TransformPoint(lat, lon)
    return coords[:2]


def imagexy2geo(dataset, row, col):
    """
    Convert image coordinates (row, col) to projected or geographic coordinates
    using GDAL's six-parameter affine model (the target system depends on the dataset)
    :param dataset: GDAL dataset
    :param row: pixel row number
    :param col: pixel column number
    :return: projected or geographic coordinates (x, y) for (row, col)
    """
    trans = dataset.GetGeoTransform()
    px = trans[0] + col * trans[1] + row * trans[2]
    py = trans[3] + col * trans[4] + row * trans[5]
    return px, py


def geo2imagexy(dataset, x, y):
    """
    Convert projected or geographic coordinates to image coordinates (row, col)
    using GDAL's six-parameter affine model
    :param dataset: GDAL dataset
    :param x: projected or geographic x coordinate
    :param y: projected or geographic y coordinate
    :return: image-space (col, row) corresponding to the given (x, y)
    """
    trans = dataset.GetGeoTransform()
    a = np.array([[trans[1], trans[2]], [trans[4], trans[5]]])
    b = np.array([x - trans[0], y - trans[3]])
    return np.linalg.solve(a, b)  # solve the 2x2 linear system with numpy's linalg.solve
def test1():
|
||||
gdal.AllRegister()
|
||||
tif = 'D:/DATA/testdata/GLCFCS30_E110N25.tif'
|
||||
# dataset = gdal.Open(r"D:\\DATA\\雷达测试\\GaoFen3_20200528_HH_DB.tif")
|
||||
dataset = gdal.Open(tif)
|
||||
|
||||
print('数据投影:')
|
||||
print(dataset.GetProjection())
|
||||
print('数据的大小(行,列):')
|
||||
print('(%s %s)' % (dataset.RasterYSize, dataset.RasterXSize))
|
||||
|
||||
x = 793214.118
|
||||
y = 2485865.527
|
||||
lon = 113.84897082317516
|
||||
lat = 22.453998686022448
|
||||
row = 24576
|
||||
col = 22540
|
||||
|
||||
print('图上坐标 -> 投影坐标:')
|
||||
coords = imagexy2geo(dataset, row, col)
|
||||
print('(%s, %s)->(%s, %s)' % (row, col, coords[0], coords[1]))
|
||||
print('投影坐标 -> 图上坐标:')
|
||||
coords = geo2imagexy(dataset, x, y)
|
||||
col = coords[0]
|
||||
row = coords[1]
|
||||
print('(%s, %s)->(%s, %s)' % (x, y, coords[0], coords[1]))
|
||||
|
||||
print('投影坐标 -> 经纬度:')
|
||||
coords = geo2lonlat(dataset, x, y)
|
||||
print('(%s, %s)->(%s, %s)' % (x, y, coords[0], coords[1]))
|
||||
print('经纬度 -> 投影坐标:')
|
||||
coords = lonlat2geo(dataset, lon, lat)
|
||||
print('(%s, %s)->(%s, %s)' % (lon, lat, coords[0], coords[1]))
|
||||
|
||||
coords1 = geo2lonlat(dataset, 657974.118, 2633321.527)
|
||||
print(coords1)
|
||||
coords2 = geo2lonlat(dataset, 793214.118, 2485865.527)
|
||||
print(coords2)
|
||||
pass
|
||||
|
||||
# if __name__ == '__main__':
|
||||
#
|
||||
# print('done')
|
|
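Since geo2imagexy inverts the same six-parameter model that imagexy2geo applies, a round trip should land back on the starting pixel. A minimal sketch, assuming example.tif is any georeferenced raster (the path is illustrative):

from osgeo import gdal

ds = gdal.Open('example.tif')      # hypothetical raster path
x, y = imagexy2geo(ds, 100, 200)   # pixel (row=100, col=200) -> georeferenced (x, y)
col, row = geo2imagexy(ds, x, y)   # back to fractional pixel coordinates
assert abs(col - 200) < 1e-6 and abs(row - 100) < 1e-6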
@@ -1,156 +0,0 @@
"""
@Project :microproduct
@File :DEMJoint
@Function :main function
@Author :LMM
@Date :2021/10/19 14:39
@Version :1.0.0
"""
from osgeo import gdal, osr
import os
import numpy as np


class DEMProcess:
    """
    DEM mosaicking and resampling
    """
    def __init__(self):
        pass

    @staticmethod
    def get_extent(fn):
        '''
        Source: https://blog.csdn.net/XBR_2014/article/details/85255412
        '''
        ds = gdal.Open(fn)
        rows = ds.RasterYSize
        cols = ds.RasterXSize
        # corner coordinates of the image
        gt = ds.GetGeoTransform()
        minx = gt[0]
        maxy = gt[3]
        maxx = gt[0] + gt[1] * cols  # x extent spans the columns
        miny = gt[3] + gt[5] * rows  # y extent spans the rows
        return (minx, maxy, maxx, miny)

    @staticmethod
    def img_mosaic(in_files, out_dem_path):
        # compare extents pairwise and keep the outermost corner coordinates,
        # i.e. the four corners of the mosaicked image
        minX, maxY, maxX, minY = DEMProcess.get_extent(in_files[0])
        for fn in in_files[1:]:
            minx, maxy, maxx, miny = DEMProcess.get_extent(fn)
            minX = min(minX, minx)
            maxY = max(maxY, maxy)
            maxX = max(maxX, maxx)
            minY = min(minY, miny)

        # rows and columns of the output image
        in_ds = gdal.Open(in_files[0])
        bands_num = in_ds.RasterCount
        gt = in_ds.GetGeoTransform()
        rows = int((maxY - minY) / abs(gt[5]))  # rows follow the y extent
        cols = int((maxX - minX) / gt[1])       # cols follow the x extent

        # raster data type
        datatype = gdal.GDT_UInt16

        # create the output image
        driver = gdal.GetDriverByName('GTiff')
        out_dem = os.path.join(out_dem_path, 'mosaic0.tif')
        out_ds = driver.Create(out_dem, cols, rows, bands_num, datatype)
        out_ds.SetProjection(in_ds.GetProjection())

        gt = list(in_ds.GetGeoTransform())
        gt[0], gt[3] = minX, maxY
        out_ds.SetGeoTransform(gt)

        for fn in in_files:
            in_ds = gdal.Open(fn)
            x_size = in_ds.RasterXSize
            y_size = in_ds.RasterYSize
            trans = gdal.Transformer(in_ds, out_ds, [])
            success, xyz = trans.TransformPoint(False, 0, 0)
            x, y, z = map(int, xyz)
            for i in range(1, bands_num + 1):
                data = in_ds.GetRasterBand(i).ReadAsArray()
                out_band = out_ds.GetRasterBand(i)
                out_data = out_band.ReadAsArray(x, y, x_size, y_size)
                data = np.maximum(data, out_data)
                out_band.WriteArray(data, x, y)

        del in_ds, out_band, out_ds

    @staticmethod
    def dem_clip(OutFilePath, DEMFilePath, SelectArea):
        '''
        Clip the DEM to the selected extent and write the result
        args:
            OutFilePath: output path of the clipped DEM
            DEMFilePath: path of the DEM to clip
            SelectArea: list [(xmin, ymax), (xmax, ymin)], i.e. upper-left and lower-right corners
        '''
        DEM_ptr = gdal.Open(DEMFilePath)
        DEM_GeoTransform = DEM_ptr.GetGeoTransform()  # geotransform of the image
        DEM_InvGeoTransform = gdal.InvGeoTransform(DEM_GeoTransform)
        SelectAreaArrayPoints = [gdal.ApplyGeoTransform(DEM_InvGeoTransform, p[0], p[1]) for p in SelectArea]
        SelectAreaArrayPoints = list(map(lambda p: (int(p[0]), int(p[1])), SelectAreaArrayPoints))  # pixel coordinates

        [(ulx, uly), (brx, bry)] = SelectAreaArrayPoints
        rowCount, colCount = bry - uly, brx - ulx

        # geotransform of the output DEM
        Out_Transfrom = list(DEM_GeoTransform)
        Out_Transfrom[0] = SelectArea[0][0]
        Out_Transfrom[3] = SelectArea[0][1]

        # create the output DEM
        Bands_num = DEM_ptr.RasterCount
        gtiff_driver = gdal.GetDriverByName('GTiff')
        datatype = gdal.GDT_UInt16
        out_dem = gtiff_driver.Create(OutFilePath, colCount, rowCount, Bands_num, datatype)
        out_dem.SetProjection(DEM_ptr.GetProjection())
        out_dem.SetGeoTransform(Out_Transfrom)

        for i in range(1, Bands_num + 1):
            data_band = DEM_ptr.GetRasterBand(i)
            out_band = out_dem.GetRasterBand(i)
            data = data_band.ReadAsArray(ulx, uly, colCount, rowCount)
            out_band.WriteArray(data)
        del out_dem

    @staticmethod
    def dem_resample(in_dem_path, out_dem_path):
        '''
        Resample the DEM; the default CRS is WGS84
        args:
            in_dem_path: input DEM folder
            meta_file_path: input xml metadata file path
            out_dem_path: output DEM folder
        '''
        # collect all DEM tiles in the folder
        dem_file_paths = [os.path.join(in_dem_path, dem_name) for dem_name in os.listdir(in_dem_path)
                          if dem_name.find(".tif") >= 0 and dem_name.find(".tif.") == -1]
        spatialreference = osr.SpatialReference()
        spatialreference.SetWellKnownGeogCS("WGS84")  # geographic coordinates, in degrees
        spatialproj = spatialreference.ExportToWkt()  # export the CRS as WKT
        # mosaic the DEM tiles into one image
        mergeFile = gdal.BuildVRT(os.path.join(out_dem_path, "mergeDEM.tif"), dem_file_paths)
        out_DEM = os.path.join(out_dem_path, "mosaic.tif")
        gdal.Warp(out_DEM,
                  mergeFile,
                  format="GTiff",
                  dstSRS=spatialproj,
                  dstNodata=-9999,
                  outputType=gdal.GDT_Float32)
        return out_DEM


# if __name__ == "__main__":
#     DEMProcess = DEMProcess()
#     in_dem_path = r'F:\大气延迟\out_dem'
#     out_dem_path = r'F:\大气延迟\out_dem'
#     DEMProcess.dem_resample(in_dem_path, out_dem_path)
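A hedged usage sketch of dem_clip; the paths and the selection rectangle below are illustrative assumptions. SelectArea is given in map coordinates as the upper-left and lower-right corners:

# clip a one-degree box out of the mosaic (all values are assumptions)
DEMProcess.dem_clip('dem_cut.tif', 'mosaic.tif', [(116.0, 40.5), (117.0, 39.5)])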
@@ -1,154 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project :microproduct
@File :ScatteringAuxData.py
@Function :backscatter
@Author :SHJ
@Contact:
@Date :2022/6/29
@Version :1.0.0
Revision history:
[No.] [Date] [Author] [Description]
1 2022-6-29 石海军 1. Support extracting information from both GF3 metadata and orthorectified metadata files
"""
import logging
from xml.etree.ElementTree import ElementTree
import math
logger = logging.getLogger("mylog")


class GF3L1AMetaData:
    def __init__(self):
        pass

    @staticmethod
    def get_QualifyValue(meta_file_path, polarization):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        QualifyValue = float(root.find('imageinfo').find('QualifyValue').find(polarization).text)
        return QualifyValue

    @staticmethod
    def get_Kdb(meta_file_path, polarization):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        Kdb = float(root.find('processinfo').find('CalibrationConst').find(polarization).text) \
            if root.find('processinfo').find('CalibrationConst').find(polarization).text != "NULL" else 0
        return Kdb


class OrthoMetaData:
    def __init__(self):
        pass

    @staticmethod
    def get_QualifyValue(meta_file_path, polarization):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        QualifyValue = float(root.find('processing').find('processingParameter').find('quantifyValue').find(polarization).text)
        return QualifyValue

    @staticmethod
    def get_Kdb(meta_file_path, polarization):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        Kdb = float(root.find('processing').find('processingParameter').find('calibrationConst').find(polarization).text)
        return Kdb

    @staticmethod
    def get_RadarCenterFrequency(meta_file_path):
        # radar centre frequency
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        RadarCenterFrequency = float(root.find('instrument').find('radarParameters').find('centerFrequency').text)
        return RadarCenterFrequency

    @staticmethod
    def get_lamda(meta_file_path):
        # radar wavelength, in m
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        lamda = float(root.find('sensor').find('lamda').text)
        return lamda


class MetaDataHandler:
    def __init__(self):
        pass

    @staticmethod
    def get_QualifyValue(meta_file_path, polarization):
        try:
            QualifyValue = OrthoMetaData.get_QualifyValue(meta_file_path, polarization)
        except Exception:
            logger.warning('LTMetaData.get_QualifyValue() error!')
            QualifyValue = GF3L1AMetaData.get_QualifyValue(meta_file_path, polarization)
            logger.info('GF3L1AMetaData.get_QualifyValue() success!')
        return QualifyValue

    @staticmethod
    def get_Kdb(meta_file_path, polarization):
        try:
            Kdb = OrthoMetaData.get_Kdb(meta_file_path, polarization)
        except Exception:
            logger.warning('LTMetaData.get_Kdb() error!')
            Kdb = GF3L1AMetaData.get_Kdb(meta_file_path, polarization)
            logger.info('GF3L1AMetaData.get_Kdb() success!')
        return Kdb

    @staticmethod
    def get_RadarCenterFrequency(meta_file_path):
        # radar centre frequency, in GHz
        RadarCenterFrequency = OrthoMetaData.get_RadarCenterFrequency(meta_file_path)
        return RadarCenterFrequency

    @staticmethod
    def get_lamda(meta_file_path):
        # radar wavelength, in m
        lamda = OrthoMetaData.get_lamda(meta_file_path)
        return lamda


class Calibration:
    def __init__(self):
        pass

    @staticmethod
    def get_Calibration_coefficient(meta_file_path, polarization):
        calibration = [0, 0, 0, 0]
        for i in polarization:
            if i == 'HH':
                quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
                kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
                data_value = ((quality / 32767) ** 2) * (10 ** ((kdb / 10) * -1))
                calibration[0] = math.sqrt(data_value)
            if i == 'HV':
                quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
                kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
                data_value = ((quality / 32767) ** 2) * (10 ** ((kdb / 10) * -1))
                calibration[1] = math.sqrt(data_value)
            if i == 'VH':
                quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
                kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
                data_value = ((quality / 32767) ** 2) * (10 ** ((kdb / 10) * -1))
                calibration[2] = math.sqrt(data_value)
            if i == 'VV':
                quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
                kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
                data_value = ((quality / 32767) ** 2) * (10 ** ((kdb / 10) * -1))
                calibration[3] = math.sqrt(data_value)
        return calibration


# if __name__ == '__main__':
#     A = ScatteringAuxData()
#     dir = 'G:\MicroWorkspace\C-SAR\AuxSAR\GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485_geo/'
#     path = dir + 'GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485.meta.xml'
#     path1 = dir + 'OrthoProduct.meta.xml'
#     t1 = A.get_QualifyValue(path, 'HH')
#     t2 = A.get_Kdb(path, 'HH')
#     t3 = A.get_RadarCenterFrequency(path)
#     t4 = A.get_lamda(path)
#     pass
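As a worked example of the scaling inside get_Calibration_coefficient (the QualifyValue and Kdb figures are illustrative, not from any real product):

import math

quality, kdb = 5000.0, 20.0  # hypothetical QualifyValue and calibration constant (dB)
data_value = ((quality / 32767) ** 2) * (10 ** (-kdb / 10))
coef = math.sqrt(data_value)
print(round(coef, 6))  # ~0.015259: amplitude factor combining quantisation and Kdb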
@@ -1,527 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project :microproduct
@File :PreProcess.py
@Function :coordinate conversion, CRS conversion, image clipping, reprojection, resampling
@Author :LMM
@Date :2021/8/25 14:17
@Version :1.0.0
"""
from shapely.geometry import Polygon  # the gdal imports must come after this line, otherwise an error occurs

from osgeo import gdal
from osgeo import gdalconst
from osgeo import osr
from osgeo import ogr
import os
import cv2
import numpy as np
import shutil
import scipy.spatial.transform
import scipy.spatial.transform._rotation_groups  # imported so packaging (e.g. PyInstaller) finds the module
import scipy.special.cython_special  # imported so packaging finds the module
import shapefile
from shapely.errors import TopologicalError
from tool.algorithm.image.ImageHandle import ImageHandler
import logging
logger = logging.getLogger("mylog")

os.environ['PROJ_LIB'] = os.getcwd()


class PreProcess:
    """
    Preprocessing: co-register all images
    """
    def __init__(self):
        self._ImageHandler = ImageHandler()
        pass

    def cal_scopes(self, processing_paras):
        # compute the ROI
        scopes = ()
        for key, value in processing_paras.items():
            if 'ori_sim' in key:
                scopes += (ImageHandler.get_scope_ori_sim(value),)
        if processing_paras['box'] != "" and processing_paras['box'] != "empty":  # only add the box when it is actually set
            scopes += self.box2scope(processing_paras['box'])
        return scopes

    def cal_scopes_roi(self, processing_paras):
        return self.intersect_polygon(self.cal_scopes(processing_paras))

    def cut_geoimg(self, workspace_preprocessing_path, para_names_geo, processing_paras):
        self.check_img_projection(workspace_preprocessing_path, para_names_geo, processing_paras)
        # compute the ROI
        scopes = self.cal_scopes(processing_paras)
        # compute the footprint of each image and their intersection
        intersect_shp_path = os.path.join(workspace_preprocessing_path, 'IntersectPolygon.shp')
        scopes_roi = self.cal_intersect_shp(intersect_shp_path, para_names_geo, processing_paras, scopes)
        # clip the SAR image and the other images
        cutted_img_paths = self.cut_imgs(workspace_preprocessing_path, para_names_geo, processing_paras, intersect_shp_path)
        return cutted_img_paths, scopes_roi

    def preprocessing(self, para_names, ref_img_name, processing_paras, workspace_preprocessing_path, workspace_preprocessed_path):
        # read every image and check its CRS
        self.check_img_projection(workspace_preprocessing_path, para_names, processing_paras)

        # compute the footprint of each image and their intersection
        intersect_shp_path = os.path.join(workspace_preprocessing_path, 'IntersectPolygon.shp')
        self.cal_intersect_shp(intersect_shp_path, para_names, processing_paras,
                               self.box2scope(processing_paras['box']))
        logger.info('create intersect shp success!')

        # clip the SAR image and the other images
        cutted_img_paths = self.cut_imgs(workspace_preprocessing_path, para_names, processing_paras,
                                         intersect_shp_path)
        logger.info('cut images success!')

        # resample everything to the resolution of the SAR image and save to the temporary directory
        preprocessed_paras = self.resampling_img(workspace_preprocessed_path, para_names, cutted_img_paths, cutted_img_paths[ref_img_name])
        # preprocessing cache files could be cleared here
        logger.info('preprocess_handle success!')
        return preprocessed_paras  # cutted_img_paths

    def get_ref_inf(self, ref_img_path):
        """Get the image information of the reference image."""
        cols = ImageHandler.get_img_width(ref_img_path)
        rows = ImageHandler.get_img_height(ref_img_path)
        proj = ImageHandler.get_projection(ref_img_path)
        geo = ImageHandler.get_geotransform(ref_img_path)
        return ref_img_path, cols, rows, proj, geo

    def check_img_projection(self, out_dir, para_names, processing_paras):
        """
        Read every image and check its CRS;
        convert projected images to geographic coordinates (EPSG:4326)
        :param para_names: names of the parameters to check
        """
        if len(para_names) == 0:
            return False
        for name in para_names:
            proj = ImageHandler.get_projection(processing_paras[name])
            keyword = proj.split("[", 2)[0]

            if keyword == "PROJCS":
                # projected CRS -> geographic CRS
                para_dir = os.path.split(processing_paras[name])
                out_para = os.path.join(out_dir, para_dir[1].split(".", 1)[0] + "_EPSG4326.tif")
                self.trans_epsg4326(out_para, processing_paras[name])
                processing_paras[name] = out_para
            elif len(keyword) == 0 or keyword.strip() == "" or keyword.isspace() is True:
                raise Exception('coordinate is missing!')

    def preprocessing_oh2004(self, para_names, processing_paras, workspace_preprocessing_path, workspace_preprocessed_path):
        # read every image and check its CRS
        self.check_img_projection(workspace_preprocessing_path, para_names, processing_paras)

        # compute the footprint of each image and their intersection
        intersect_shp_path = os.path.join(workspace_preprocessing_path, 'IntersectPolygon.shp')
        scopes = self.cal_intersect_shp(intersect_shp_path, para_names, processing_paras,
                                        self.box2scope(processing_paras['box']))
        logger.info('create intersect shp success!')

        # clip the SAR image and the other images
        cutted_img_paths = self.cut_imgs(workspace_preprocessed_path, para_names, processing_paras,
                                         intersect_shp_path)
        logger.info('cut images success!')

        return cutted_img_paths, scopes

    @staticmethod
    def lonlat2geo(lat, lon):
        """
        Convert WGS84 lon/lat to planar coordinates
        Param: lat is the WGS_1984 latitude
        Param: lon is the WGS_1984 longitude
        Returns the converted x, y
        """
        dstsrs1 = osr.SpatialReference()
        dstsrs1.ImportFromEPSG(32649)

        dstsrs2 = osr.SpatialReference()
        dstsrs2.ImportFromEPSG(4326)

        ct = osr.CoordinateTransformation(dstsrs2, dstsrs1)
        coords = ct.TransformPoint(lat, lon)
        # print("converted x, y:", coords[:2])
        return coords[:2]

    @staticmethod
    def trans_geogcs2projcs(out_path, in_path):
        """
        :param out_path: output path of the WGS84 projected image
        :param in_path: input path of the geographic image
        """
        # create the output directory
        if os.path.exists(os.path.split(out_path)[0]) is False:
            os.makedirs(os.path.split(out_path)[0])
        options = gdal.WarpOptions(format='GTiff', srcSRS='EPSG:4326', dstSRS='EPSG:32649')
        gdal.Warp(out_path, in_path, options=options)

    @staticmethod
    def trans_projcs2geogcs(out_path, in_path, EPSG_src=32649, EPSG_dst=4326):
        """
        :param out_path: output path of the geographic image
        :param in_path: input path of the projected image
        :param EPSG_src: source projected CRS
        :param EPSG_dst: target CRS
        """
        str_EPSG_src = 'EPSG:' + str(EPSG_src)
        str_EPSG_dst = 'EPSG:' + str(EPSG_dst)

        # create the output directory
        if os.path.exists(os.path.split(out_path)[0]) is False:
            os.makedirs(os.path.split(out_path)[0])
        options = gdal.WarpOptions(format='GTiff', srcSRS=str_EPSG_src, dstSRS=str_EPSG_dst)
        gdal.Warp(out_path, in_path, options=options)

    @staticmethod
    def trans_epsg4326(out_path, in_path):
        OutTile = gdal.Warp(out_path, in_path,
                            dstSRS='EPSG:4326',
                            resampleAlg=gdalconst.GRA_Bilinear
                            )
        OutTile = None
        return True

    @staticmethod
    def box2scope(str_box):
        roi_box = ()
        if str_box == '' or str_box == 'empty':
            return roi_box
        box_list = [float(num) for num in list(str_box.split(';'))]
        if len(box_list) == 4:
            roi_box = ([[box_list[2], box_list[1]], [box_list[3], box_list[1]], [box_list[2], box_list[0]],
                        [box_list[3], box_list[0]]],)
        return roi_box

    def cal_intersect_shp(self, shp_path, para_names, processing_paras, add_scope=()):
        """
        :param shp_path: where the intersection polygon shapefile is saved
        :param para_names: names of the images used for the intersection test
        :return: True or False
        """
        scopes = ()

        if len(add_scope) != 0:
            scopes += add_scope
        for name in para_names:
            scope_tuple = (self._ImageHandler.get_scope(processing_paras[name]),)
            scopes += scope_tuple
        for n, scope in zip(range(len(scopes)), scopes):
            logging.info("scope" + str(n) + ":%s", scope)

        intersect_polygon = self.intersect_polygon(scopes)
        if intersect_polygon is None:
            logger.error('image range does not overlap!')
            raise Exception('create intersect shp fail!')
        logging.info("scope roi :%s", intersect_polygon)
        if self.write_polygon_shp(shp_path, intersect_polygon, 4326) is False:
            raise Exception('create intersect shp fail!')
        return intersect_polygon

    @staticmethod
    def intersect_polygon(scopes_tuple):
        """
        Compute the intersection of several polygons; note each polygon is
        replaced by its convex hull before intersecting
        :param scopes_tuple: tuple holding the coordinates of several polygons
        :return: coordinates of the intersection ((x0,y0),(x1,y1),..., (xn,yn))
        """
        if len(scopes_tuple) < 2:
            logger.error('len(scopes_tuple) < 2')
            # return  # todo: with a single scene no intersection can be computed

        try:
            # Polygon normalises the ring; the vertex order becomes:
            # upper-left, lower-left, lower-right, upper-right, upper-left
            tmp = tuple(scopes_tuple[0])
            poly_intersect = Polygon(tmp).convex_hull
            for i in range(len(scopes_tuple) - 1):
                polygon_next = Polygon(tuple(scopes_tuple[i + 1])).convex_hull
                if poly_intersect.intersects(polygon_next):
                    poly_intersect = poly_intersect.intersection(polygon_next)
                else:
                    msg = 'Image:' + str(i) + 'range does not overlap!'
                    logger.error(msg)
                    return
            return list(poly_intersect.boundary.coords)[:-1]
        # except shapely.geos.TopologicalError:
        except TopologicalError:
            logger.error('shapely.geos.TopologicalError occurred!')
            return

    @staticmethod
    def write_polygon_shp(out_shp_path, point_list, EPSG=32649):
        """
        Create a closed-ring polygon shapefile.
        :param out_shp_path: where the shapefile is saved
        :param point_list: list of ring vertices [[x0,y0],[x1,y1]...[xn,yn]]
        :return: True or False
        """
        # needed so that Chinese paths are supported
        gdal.SetConfigOption("GDAL_FILENAME_IS_UTF8", "NO")
        # needed so that attribute table fields support Chinese
        gdal.SetConfigOption("SHAPE_ENCODING", "")
        # register all drivers
        ogr.RegisterAll()

        # create the data source, here an ESRI shapefile
        str_driver_name = "ESRI Shapefile"
        o_driver = ogr.GetDriverByName(str_driver_name)
        if o_driver is None:
            msg = 'driver(' + str_driver_name + ')is invalid value'
            logger.error(msg)
            return False

        # create the data source
        if os.path.exists(out_shp_path) and os.path.isfile(out_shp_path):  # remove an existing file of the same name
            os.remove(out_shp_path)
        o_ds = o_driver.CreateDataSource(out_shp_path)
        if o_ds is None:
            msg = 'create file failed!' + out_shp_path
            logger.error(msg)
            return False

        # create a polygon layer
        srs = osr.SpatialReference()
        # srs.ImportFromEPSG(32649)  # projected CRS, WGS84
        srs.ImportFromEPSG(EPSG)  # geographic CRS by EPSG code
        o_layer = o_ds.CreateLayer("TestPolygon", srs, ogr.wkbPolygon)
        if o_layer is None:
            msg = 'create coverage failed!'
            logger.error(msg)
            return False

        # build the attribute table
        # an integer field called FieldID
        o_field_id = ogr.FieldDefn("FieldID", ogr.OFTInteger)
        o_layer.CreateField(o_field_id, 1)

        # a string field called FieldName with width 100
        o_field_name = ogr.FieldDefn("FieldName", ogr.OFTString)
        o_field_name.SetWidth(100)
        o_layer.CreateField(o_field_name, 1)

        o_defn = o_layer.GetLayerDefn()

        # create the rectangle feature
        o_feature_rectangle = ogr.Feature(o_defn)
        o_feature_rectangle.SetField(0, 1)
        o_feature_rectangle.SetField(1, "IntersectRegion")

        # create the ring geometry
        ring = ogr.Geometry(ogr.wkbLinearRing)

        for i in range(len(point_list)):
            ring.AddPoint(point_list[i][0], point_list[i][1])
        ring.CloseRings()

        # create the polygon geometry
        geom_rect_polygon = ogr.Geometry(ogr.wkbPolygon)
        geom_rect_polygon.AddGeometry(ring)

        o_feature_rectangle.SetGeometry(geom_rect_polygon)
        o_layer.CreateFeature(o_feature_rectangle)

        o_ds.Destroy()
        return True

    def cut_imgs(self, out_dir, para_names, processing_paras, shp_path):
        """
        Clip images with a vector file
        :param para_names: names of the images to clip
        :param shp_path: the shapefile used for clipping
        """
        if len(para_names) == 0:
            return {}
        cutted_img_paths = {}
        try:
            for name in para_names:
                input_path = processing_paras[name]
                output_path = os.path.join(out_dir, name + '_cut.tif')
                self.cut_img(output_path, input_path, shp_path)
                cutted_img_paths.update({name: output_path})
                logger.info('cut %s success!', name)
        except BaseException:
            logger.error('cut_img failed!')
            return {}
        return cutted_img_paths

    @staticmethod
    def cut_img(output_path, input_path, shp_path):
        """
        :param output_path: the clipped image
        :param input_path: the image to clip
        :param shp_path: the clipping vector
        :return: True or False
        """
        r = shapefile.Reader(shp_path)
        box = r.bbox

        input_dataset = gdal.Open(input_path)

        gdal.Warp(output_path, input_dataset, format='GTiff', outputBounds=box, cutlineDSName=shp_path, dstNodata=-9999)
        # cutlineWhere="FIELD = 'whatever'",
        # optionally you can filter your cutline (shapefile) based on attribute values
        # select the no data value you like
        # ds = None
        # do other stuff with ds object, it is your cropped dataset. in this case we only close the dataset.
        del input_dataset
        return True

    def resampling_img(self, out_dir, para_names, img_paths, refer_img_path):
        """
        Resample images onto the grid of the reference image
        :param para_names: names of the images to resample
        :param img_paths: paths of the images to resample
        :param refer_img_path: path of the reference image
        """
        if len(para_names) == 0 or len(img_paths) == 0:
            return
        prepro_imgs_path = {}
        for name in para_names:
            img_path = img_paths[name]
            output_para = os.path.join(out_dir, name + '_preprocessed.tif')
            self.resampling_by_scale(img_path, output_para, refer_img_path)
            prepro_imgs_path.update({name: output_para})
            logger.info('resampling %s success!', name)
        return prepro_imgs_path

    @staticmethod
    def resampling_by_scale(input_path, target_file, refer_img_path):
        """
        Resample an image by a scale factor
        :param input_path: GDAL raster path
        :param target_file: output image
        :param refer_img_path: reference image
        :return: True or False
        """
        ref_dataset = gdal.Open(refer_img_path)
        ref_cols = ref_dataset.RasterXSize
        ref_rows = ref_dataset.RasterYSize

        target_dataset = gdal.Open(input_path)
        target_cols = target_dataset.RasterXSize
        target_rows = target_dataset.RasterYSize

        if (ref_cols == target_cols) and (ref_rows == target_rows):
            shutil.copyfile(input_path, target_file)
            return True

        dataset = gdal.Open(input_path)
        if dataset is None:
            logger.error('resampling_by_scale:dataset is None!')
            return False

        band_count = dataset.RasterCount
        if (band_count == 0) or (target_file == ""):
            logger.error("resampling_by_scale:Parameters of the abnormal!")
            return False

        cols = dataset.RasterXSize
        rows = dataset.RasterYSize
        scale_x = ref_cols / cols
        scale_y = ref_rows / rows

        # cols = int(cols * scale)
        # rows = int(rows * scale)
        cols = ref_cols
        rows = ref_rows

        geotrans = list(dataset.GetGeoTransform())
        geotrans[1] = geotrans[1] / scale_x  # pixel width scaled by scale_x
        geotrans[5] = geotrans[5] / scale_y  # pixel height scaled by scale_y

        if os.path.exists(target_file) and os.path.isfile(target_file):  # remove an existing file of the same name
            os.remove(target_file)
        if not os.path.exists(os.path.split(target_file)[0]):
            os.makedirs(os.path.split(target_file)[0])

        band1 = dataset.GetRasterBand(1)
        data_type = band1.DataType
        target = dataset.GetDriver().Create(target_file, xsize=cols, ysize=rows, bands=band_count,
                                            eType=data_type)
        target.SetProjection(dataset.GetProjection())  # set the projection
        target.SetGeoTransform(geotrans)  # set the geotransform
        total = band_count + 1
        for index in range(1, total):
            # read the band data at the target size
            data = dataset.GetRasterBand(index).ReadAsArray(buf_xsize=cols, buf_ysize=rows)
            out_band = target.GetRasterBand(index)

            no_data_value = dataset.GetRasterBand(index).GetNoDataValue()  # carry the nodata value over
            if not (no_data_value is None):
                out_band.SetNoDataValue(no_data_value)

            out_band.WriteArray(data)  # write the data into the new image
            out_band.FlushCache()
            out_band.ComputeBandStats(False)  # compute statistics
        del dataset
        del target
        return True

    @staticmethod
    def cv_mean_filter(out_path, in_path, filter_size):
        """
        :param out_path: filtered image
        :param in_path: image to filter
        :param filter_size: filter window size
        :return: True or False
        """
        proj = ImageHandler.get_projection(in_path)
        geotrans = ImageHandler.get_geotransform(in_path)
        array = ImageHandler.get_band_array(in_path, 1)
        array = cv2.blur(array, (filter_size, filter_size))  # mean filter
        ImageHandler.write_img(out_path, proj, geotrans, array)
        return True

    @staticmethod
    def check_LocalIncidenceAngle(out_tif_path, in_tif_path):
        """
        Set invalid angle values to nan and convert degrees to radians
        :param out_tif_path: output image path
        :param in_tif_path: input image path
        """
        proj, geo, angle = ImageHandler.read_img(in_tif_path)
        angle = angle.astype(np.float32, order='C')
        angle[angle == -9999] = np.nan

        mean = np.nanmean(angle)
        if mean > np.pi:
            angle = np.deg2rad(angle)  # degrees to radians

        angle[np.where(angle >= 0.5 * np.pi)] = np.nan
        angle[np.where(angle < 0)] = np.nan

        ImageHandler.write_img(out_tif_path, proj, geo, angle)
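Taken together, the class supports a check -> intersect -> cut -> resample pipeline. A minimal sketch, assuming hypothetical inputs keyed 'HH' and 'NDVI' and illustrative workspace paths:

preproc = PreProcess()
paras = {'HH': 'hh.tif', 'NDVI': 'ndvi.tif', 'box': 'empty'}  # hypothetical inputs
out = preproc.preprocessing(['HH', 'NDVI'], 'HH', paras,
                            'workspace/preprocessing/', 'workspace/preprocessed/')
# out maps each name to an image clipped to the common footprint and
# resampled onto the HH grid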
@@ -1,237 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project:microproduct
@File:ROIAlg.py
@Function:
@Contact:
@Author:SHJ
@Date:2021/11/17
@Version:1.0.0
"""
import logging
from tool.algorithm.image.ImageHandle import ImageHandler
from tool.algorithm.algtools.PreProcess import PreProcess as pp
import numpy as np


logger = logging.getLogger("mylog")


class ROIAlg:
    def __init__(self,):
        pass

    @staticmethod
    def roi_process(names, processing_path, processing_paras, preprocessed_paras):
        roi_paths = []
        roi = ROIAlg()
        for name in names:
            if 'LocalIncidenceAngle' in name:
                # build a mask from nan angle values
                pp.check_LocalIncidenceAngle(preprocessed_paras[name], preprocessed_paras[name])
                angle_nan_mask_path = processing_path + 'angle_nan_mask.tif'
                roi.trans_tif2mask(angle_nan_mask_path, preprocessed_paras[name], np.nan)
                roi_paths.append(angle_nan_mask_path)
            elif ("HH" in name) or ("HV" in name) or ("VH" in name) or ("VV" in name):
                # build a mask from the valid extent of the image
                tif_mask_path = processing_path + name + "_tif_mask.tif"
                roi.trans_tif2mask(tif_mask_path, preprocessed_paras[name], np.nan)
                roi_paths.append(tif_mask_path)
            elif name == 'Covering':
                # derive the vegetated area from the land-cover layer
                if processing_paras['CoveringIDs'] == 'empty':
                    cover_data = ImageHandler.get_data(preprocessed_paras[name])
                    cover_data[np.where(np.isnan(cover_data))] = 0
                    cover_id_list = list(np.unique(cover_data))
                else:
                    cover_id_list = list(processing_paras['CoveringIDs'].split(';'))
                    cover_id_list = [int(num) for num in cover_id_list]
                cover_mask_path = processing_path + "cover_mask.tif"
                roi.trans_cover2mask(cover_mask_path, preprocessed_paras[name], cover_id_list)
                roi_paths.append(cover_mask_path)
            elif name == "NDVI":
                # derive bare soil from NDVI; the index ranges from -1.0 to 1.0
                # and mostly reflects vegetation amount:
                # negative values mainly come from clouds, water and snow;
                # values near zero mainly from rock and bare soil;
                # low values (<= 0.1) indicate barren rock, sand or snow;
                # moderate values (0.2 to 0.3) indicate shrub and grassland;
                # high values (0.6 to 0.8) indicate temperate and tropical rainforest.
                ndvi_mask_path = processing_path + "ndvi_mask.tif"
                ndvi_scope = list(processing_paras['NDVIScope'].split(';'))
                threshold_of_ndvi_min = float(ndvi_scope[0])
                threshold_of_ndvi_max = float(ndvi_scope[1])
                roi.trans_tif2mask(ndvi_mask_path, preprocessed_paras[name], threshold_of_ndvi_min, threshold_of_ndvi_max)
                roi_paths.append(ndvi_mask_path)
            # else:
            #     # other feature images
            #     tif_mask_path = processing_path + name + "_mask.tif"
            #     roi.trans_tif2mask(tif_mask_path, preprocessed_paras[name], np.nan)
            #     roi_paths.append(tif_mask_path)

        bare_land_mask_path = processing_path + "bare_land_mask.tif"
        for roi_path in roi_paths:
            roi.combine_mask(bare_land_mask_path, roi_path, bare_land_mask_path)
        return bare_land_mask_path

    @staticmethod
    def roi_process_VP(names, processing_path, processing_paras, preprocessed_paras, file_name):
        roi_paths = []
        roi = ROIAlg()
        for name in names:
            if 'LocalIncidenceAngle' in name:
                # build a mask from nan angle values
                pp.check_LocalIncidenceAngle(preprocessed_paras[name], preprocessed_paras[name])
                angle_nan_mask_path = processing_path + 'angle_nan_mask.tif'
                roi.trans_tif2mask(angle_nan_mask_path, preprocessed_paras[name], np.nan)
                roi_paths.append(angle_nan_mask_path)
            elif ("HH" in name) or ("HV" in name) or ("VH" in name) or ("VV" in name):
                # build a mask from the valid extent of the image
                tif_mask_path = processing_path + name + "_tif_mask.tif"
                roi.trans_tif2mask(tif_mask_path, preprocessed_paras[name], np.nan)
                roi_paths.append(tif_mask_path)
            elif name == 'Covering':
                # derive the vegetated area from the land-cover layer
                cover_mask_path = processing_path + "cover_mask.tif"
                if processing_paras['CoveringIDs'] == 'empty':
                    cover_data = ImageHandler.get_data(preprocessed_paras[file_name + '_' + name])
                    cover_data[np.where(np.isnan(cover_data))] = 0
                    cover_id_list = list(np.unique(cover_data))
                else:
                    cover_id_list = list(processing_paras['CoveringIDs'].split(';'))
                    cover_id_list = [int(num) for num in cover_id_list]
                roi.trans_cover2mask(cover_mask_path, preprocessed_paras[file_name + '_' + name], cover_id_list)
                roi_paths.append(cover_mask_path)
            elif name == "NDVI":
                # derive bare soil from NDVI; the index ranges from -1.0 to 1.0
                # and mostly reflects vegetation amount:
                # negative values mainly come from clouds, water and snow;
                # values near zero mainly from rock and bare soil;
                # low values (<= 0.1) indicate barren rock, sand or snow;
                # moderate values (0.2 to 0.3) indicate shrub and grassland;
                # high values (0.6 to 0.8) indicate temperate and tropical rainforest.
                ndvi_mask_path = processing_path + "ndvi_mask.tif"
                ndvi_scope = list(processing_paras['NDVIScope'].split(';'))
                threshold_of_ndvi_min = float(ndvi_scope[0])
                threshold_of_ndvi_max = float(ndvi_scope[1])
                roi.trans_tif2mask(ndvi_mask_path, preprocessed_paras[name], threshold_of_ndvi_min,
                                   threshold_of_ndvi_max)
                roi_paths.append(ndvi_mask_path)
            # else:
            #     # other feature images
            #     tif_mask_path = processing_path + name + "_mask.tif"
            #     roi.trans_tif2mask(tif_mask_path, preprocessed_paras[name], np.nan)
            #     roi_paths.append(tif_mask_path)

        bare_land_mask_path = processing_path + "bare_land_mask.tif"
        for roi_path in roi_paths:
            roi.combine_mask(bare_land_mask_path, roi_path, bare_land_mask_path)
        return bare_land_mask_path

    @staticmethod
    def trans_tif2mask(out_mask_path, in_tif_path, threshold_min, threshold_max=None):
        """
        :param out_mask_path: output mask path
        :param in_tif_path: input path
        :param threshold_min: lower threshold
        :param threshold_max: upper threshold
        :return: True or False
        """
        image_handler = ImageHandler()
        proj = image_handler.get_projection(in_tif_path)
        geotrans = image_handler.get_geotransform(in_tif_path)
        array = image_handler.get_band_array(in_tif_path, 1)
        if threshold_max is None and np.isnan(threshold_min):
            nan = np.isnan(array)
            mask = (nan.astype(int) == 0).astype(int)
            mask1 = ((array == -9999).astype(int) == 0).astype(int)
            mask *= mask1
            image_handler.write_img(out_mask_path, proj, geotrans, mask)
        else:
            if threshold_min < threshold_max:
                mask = ((array > threshold_min) & (array < threshold_max)).astype(int)
                image_handler.write_img(out_mask_path, proj, geotrans, mask)
            elif threshold_min > threshold_max:
                mask = ((array < threshold_min) & (array > threshold_max)).astype(int)
                image_handler.write_img(out_mask_path, proj, geotrans, mask)
            elif threshold_max == threshold_min:
                mask = ((array == threshold_min).astype(int) == 0).astype(int)
                image_handler.write_img(out_mask_path, proj, geotrans, mask)

        logger.info("trans_tif2mask success, path: %s", out_mask_path)
        return True

    @staticmethod
    def trans_cover2mask(out_mask_path, in_tif_path, cover_id_list):
        """
        :param out_mask_path: output mask path
        :param in_tif_path: input path
        :param cover_id_list: class ids of the land-cover data
        :return: True or False
        """
        image_handler = ImageHandler()
        proj = image_handler.get_projection(in_tif_path)
        geotrans = image_handler.get_geotransform(in_tif_path)
        array = image_handler.get_band_array(in_tif_path, 1)

        mask = np.zeros(array.shape, dtype=bool)
        for id in cover_id_list:
            mask_tmp = (array == id)
            mask = mask | mask_tmp

        mask = mask.astype(int)
        image_handler.write_img(out_mask_path, proj, geotrans, mask)

    @staticmethod
    def combine_mask(out_mask_path, in_main_mask_path, in_sub_mask_path):
        """
        :param out_mask_path: output path
        :param in_main_mask_path: main mask path; the output inherits its geoinformation
        :param in_sub_mask_path: secondary mask path
        """
        image_handler = ImageHandler()
        proj = image_handler.get_projection(in_main_mask_path)
        geotrans = image_handler.get_geotransform(in_main_mask_path)
        main_array = image_handler.get_band_array(in_main_mask_path, 1)
        if image_handler.get_dataset(in_sub_mask_path) != None:
            sub_array = image_handler.get_band_array(in_sub_mask_path, 1)
            main_array = main_array * sub_array
        image_handler.write_img(out_mask_path, proj, geotrans, main_array)
        logger.info("combine_mask success, path: %s", out_mask_path)
        return True

    @staticmethod
    def cal_roi(out_tif_path, in_tif_path, mask_path, background_value=1):
        """
        :param out_tif_path: output ROI image
        :param in_tif_path: image to apply the ROI to
        :param mask_path: the mask
        :param background_value: background value written into invalid areas
        :return: True or False
        """
        image_handler = ImageHandler()
        proj = image_handler.get_projection(in_tif_path)
        geotrans = image_handler.get_geotransform(in_tif_path)
        tif_array = image_handler.get_data(in_tif_path)  # read all bands into an array
        mask_array = image_handler.get_band_array(mask_path, 1)
        if len(tif_array.shape) == 3:
            im_bands, im_height, im_width = tif_array.shape
        else:
            im_bands, (im_height, im_width) = 1, tif_array.shape
        if im_bands == 1:
            tif_array[np.isnan(mask_array)] = background_value
            tif_array[mask_array == 0] = background_value
        elif im_bands > 1:
            for i in range(0, im_bands):
                tif_array[i, :, :][np.isnan(mask_array)] = background_value
                tif_array[i, :, :][mask_array == 0] = background_value
        image_handler.write_img(out_tif_path, proj, geotrans, tif_array, background_value)
        logger.info("cal_roi success, path: %s", out_tif_path)
        return True

# if __name__ == '__main__':
#     dir = r'G:\MicroWorkspace\C-SAR\SoilMoisture\Temporary\processing/'
#     out_tif_path = dir + 'soil_moisture_roi.tif'
#     in_tif_path = dir + 'soil_moisture.tif'
#     mask_path = dir + 'bare_land_mask.tif'
#     background_value = np.nan
#     ROIAlg.cal_roi(out_tif_path, in_tif_path, mask_path, background_value)
#     pass
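A hedged sketch chaining the mask helpers above; the paths and thresholds are illustrative assumptions:

import numpy as np

roi = ROIAlg()
roi.trans_tif2mask('ndvi_mask.tif', 'ndvi.tif', 0.1, 0.7)  # 1 inside the NDVI window
roi.combine_mask('combined_mask.tif', 'ndvi_mask.tif', 'cover_mask.tif')
roi.cal_roi('result_roi.tif', 'result.tif', 'combined_mask.tif',
            background_value=np.nan)  # masked-out pixels become nan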
@@ -1,57 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project:__init__.py
@File:sieve_filter.py
@Function:GDAL speckle (sieve) filtering
@Contact: 'https://www.osgeo.cn/gdal/api/gdal_alg.html?highlight=gdalsievefilter#'
'_CPPv415GDALSieveFilter15GDALRasterBandH15GDALRasterBandH15GDALRasterBandHiiPPc16GDALProgressFuncPv'
@Author:SHJ
@Date:2021/8/30 8:42
@Version:1.0.0
"""
import logging
from osgeo import gdal
import numpy as np
# from onestar.soilMoisture.OneMoistureImage import ImageHandler
from tool.algorithm.image.ImageHandle import ImageHandler
logger = logging.getLogger("mylog")


def gdal_sieve_filter(dst_filename, src_filename, threshold=100, connectedness=4):
    """
    Raster sieve filtering based on python GDAL
    :param dst_filename: output filtered image
    :param src_filename: input file to process
    :param threshold: raster polygon size threshold, in pixels
    :param connectedness: pixel connectivity, 4 or 8
    :return:
    """
    # 4 means diagonal pixels are not considered directly adjacent for polygon
    # membership purposes; 8 means they are
    gdal.AllRegister()
    # print('input raster: {}, threshold: {}'.format(src_filename, threshold))
    dataset = gdal.Open(src_filename, gdal.GA_Update)
    if dataset is None:
        logger.error('{}open tif fail!'.format(src_filename))
        return False
    # the source band to process
    src_band = dataset.GetRasterBand(1)
    mask_band = src_band.GetMaskBand()
    dst_band = src_band
    prog_func = gdal.TermProgress_nocb
    # run the GDAL sieve filter
    result = gdal.SieveFilter(src_band, mask_band, dst_band, threshold, connectedness, callback=prog_func)
    if result != 0:
        return False
    proj = dataset.GetProjection()
    geotransform = dataset.GetGeoTransform()
    dst_array = dst_band.ReadAsArray(0, 0, dst_band.XSize, dst_band.YSize)
    ImageHandler.write_img(dst_filename, proj, geotransform, dst_array)
    del dataset
    return True


#
# if __name__ == '__main__':
#     inputfile = r'D:\DATA\testdata\srcimg\GLCFCS30_E110N25.tif'
#     outputfile = r'D:\DATA\testdata\srcimg\GLCFCS30_E110N25_sieve_filter.tif'
#     flag = gdal_sieve_filter(outputfile, inputfile, threshold=100, connectedness=4)
@@ -1,122 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project :microproduct
@File :ScatteringAuxData.py
@Function :backscatter
@Author :SHJ
@Contact:
@Date :2022/6/29
@Version :1.0.0
Revision history:
[No.] [Date] [Author] [Description]
1 2022-6-29 石海军 1. Support extracting information from both GF3 metadata and orthorectified metadata files
"""
import logging
from xml.etree.ElementTree import ElementTree
logger = logging.getLogger("mylog")


class GF3L1AMetaData:
    def __init__(self):
        pass

    @staticmethod
    def get_QualifyValue(meta_file_path, polarization):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        QualifyValue = float(root.find('imageinfo').find('QualifyValue').find(polarization).text)
        return QualifyValue

    @staticmethod
    def get_Kdb(meta_file_path, polarization):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        Kdb = float(root.find('processinfo').find('CalibrationConst').find(polarization).text)
        return Kdb


class OrthoMetaData:
    def __init__(self):
        pass

    @staticmethod
    def get_QualifyValue(meta_file_path, polarization):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        QualifyValue = float(root.find('l1aInfo').find('imageinfo').find('QualifyValue').find(polarization).text)
        return QualifyValue

    @staticmethod
    def get_Kdb(meta_file_path, polarization):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        Kdb = float(root.find('l1aInfo').find('processinfo').find('CalibrationConst').find(polarization).text)
        return Kdb

    @staticmethod
    def get_RadarCenterFrequency(meta_file_path):
        # radar centre frequency
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        RadarCenterFrequency = float(root.find('sensor').find('RadarCenterFrequency').text)
        return RadarCenterFrequency

    @staticmethod
    def get_lamda(meta_file_path):
        # radar wavelength, in m
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        lamda = float(root.find('sensor').find('lamda').text)
        return lamda


class ScatteringAuxData:
    def __init__(self):
        pass

    @staticmethod
    def get_QualifyValue(meta_file_path, polarization):
        try:
            QualifyValue = OrthoMetaData.get_QualifyValue(meta_file_path, polarization)
        except Exception:
            logger.warning('OrthoMetaData.get_QualifyValue() error!')
            QualifyValue = GF3L1AMetaData.get_QualifyValue(meta_file_path, polarization)
            logger.info('GF3L1AMetaData.get_QualifyValue() success!')
        return QualifyValue

    @staticmethod
    def get_Kdb(meta_file_path, polarization):
        try:
            Kdb = OrthoMetaData.get_Kdb(meta_file_path, polarization)
        except Exception:
            logger.warning('OrthoMetaData.get_Kdb() error!')
            Kdb = GF3L1AMetaData.get_Kdb(meta_file_path, polarization)
            logger.info('GF3L1AMetaData.get_Kdb() success!')
        return Kdb

    @staticmethod
    def get_RadarCenterFrequency(meta_file_path):
        # radar centre frequency, in GHz
        RadarCenterFrequency = OrthoMetaData.get_RadarCenterFrequency(meta_file_path)
        return RadarCenterFrequency

    @staticmethod
    def get_lamda(meta_file_path):
        # radar wavelength, in m
        lamda = OrthoMetaData.get_lamda(meta_file_path)
        return lamda


# if __name__ == '__main__':
#     A = ScatteringAuxData()
#     dir = 'G:\MicroWorkspace\C-SAR\AuxSAR\GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485_geo/'
#     path = dir + 'GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485.meta.xml'
#     path1 = dir + 'OrthoProduct.meta.xml'
#     t1 = A.get_QualifyValue(path, 'HH')
#     t2 = A.get_Kdb(path, 'HH')
#     t3 = A.get_RadarCenterFrequency(path)
#     t4 = A.get_lamda(path)
#     pass
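The try/except fallback above means callers do not need to know which flavour of metadata they hold; a hedged sketch (the file name is a placeholder, not a real product):

meta = 'GF3_product.meta.xml'  # hypothetical path
qv = ScatteringAuxData.get_QualifyValue(meta, 'HH')  # tries l1aInfo first, falls back to the L1A layout
kdb = ScatteringAuxData.get_Kdb(meta, 'HH')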
@@ -1,414 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project :microproduct
@File :CalculateIncident.py
@Function :incidence angle and local incidence angle calculation
@Author :LMM
@Date :2021/8/25 14:17
@Version :1.0.0
"""
import os
import numpy as np
from osgeo import gdal
from osgeo import gdalconst
import gc
import math
from xml.dom import minidom  # part of the standard library, no install needed


class CalculateIncident:
    def __init__(self):
        pass

    @staticmethod
    def add_round(npgrid):
        """
        Pad the array with a one-pixel border and return the padded matrix
        param: npgrid DEM array
        """
        ny, nx = npgrid.shape  # ny: number of rows, nx: number of columns
        zbc = np.zeros((ny + 2, nx + 2))
        zbc[1:-1, 1:-1] = npgrid
        # edges
        zbc[0, 1:-1] = npgrid[0, :]
        zbc[-1, 1:-1] = npgrid[-1, :]
        zbc[1:-1, 0] = npgrid[:, 0]
        zbc[1:-1, -1] = npgrid[:, -1]
        # corners
        zbc[0, 0] = npgrid[0, 0]
        zbc[0, -1] = npgrid[0, -1]
        zbc[-1, 0] = npgrid[-1, 0]
        zbc[-1, -1] = npgrid[-1, -1]
        print("shape of the padded array", zbc.shape)
        return zbc

    @staticmethod
    def cal_dxdy(zbc, dx):
        """
        Compute dx, dy gradients
        param: zbc the padded array
        param: dx the DEM pixel size
        """
        we_x = ((zbc[1:-1, :-2]) - (zbc[1:-1, 2:])) / dx / 2  # WE direction
        ns_y = ((zbc[2:, 1:-1]) - (zbc[:-2, 1:-1])) / dx / 2  # NS direction
        print("shape of Sx", we_x.shape, "shape of Sy", ns_y.shape)
        sx = we_x[1:-1, 1:-1]
        sy = ns_y[1:-1, 1:-1]
        # np.savetxt("dxdy.csv", dx, delimiter=",")
        print("shape of Sx2", sx.shape, "shape of Sy2", sy.shape)
        return sx, sy

    @staticmethod
    def cal_slopasp(dx, dy):
        # compute slope and aspect
        # slope
        slope = (np.arctan(np.sqrt(dx * dx + dy * dy))) * 57.29578  # to degrees, 57.29578 = 180/math.pi
        slope = slope[1:-1, 1:-1]
        # aspect
        aspect = np.ones([dx.shape[0], dx.shape[1]]).astype(np.float32)

        # (vectorised variant kept commented out for reference)
        # dx = dx.astype(np.float32)
        # dy = dy.astype(np.float32)
        # a1=(np.where(dx==0) and np.where(dy ==0))
        # print(a1)
        # aspect[a1]=-1
        # a2 = (np.where(dx == 0) and np.where(dy > 0))
        # aspect[a2] =0.0
        # a3 = (np.where(dx == 0) and np.where(dy <0))
        # aspect[a3] =180.0
        # a4 = (np.where(dx > 0) and np.where(dy ==0))
        # aspect[a4] =90.0
        # a5 = (np.where(dx < 0) and np.where(dy ==0))
        # aspect[a5] =270.0
        # a6 = (np.where(dx != 0) or np.where(dy !=0))
        # b=dy[a6]
        # print(":", 1)
        # aspect[a6] =float(math.atan2(dy[i, j], dx[i, j])) * 57.29578
        # a7=np.where(aspect[a6]< 0.0)
        # aspect[a7] = 90.0 - aspect[a7]
        # a8=np.where(aspect[a6]> 90.0)
        # aspect[a8] = 450.0- aspect[a8]
        # a9 =np.where(aspect[a6] >= 0 or aspect[a6] <= 90)
        # aspect[a9] =90.0 - aspect[a9]

        for i in range(dx.shape[0]):
            for j in range(dx.shape[1]):
                x = float(dx[i, j])
                y = float(dy[i, j])
                if (x == 0.0) & (y == 0.0):
                    aspect[i, j] = -1
                elif x == 0.0:
                    if y > 0.0:
                        aspect[i, j] = 0.0
                    else:
                        aspect[i, j] = 180.0
                elif y == 0.0:
                    if x > 0.0:
                        aspect[i, j] = 90.0
                    else:
                        aspect[i, j] = 270.0
                else:
                    aspect[i, j] = float(math.atan2(y, x)) * 57.29578  # range (-pi/2, pi/2)
                    if aspect[i, j] < 0.0:
                        aspect[i, j] = 90.0 - aspect[i, j]
                    elif aspect[i, j] > 90.0:
                        aspect[i, j] = 450.0 - aspect[i, j]
                    else:
                        aspect[i, j] = 90.0 - aspect[i, j]
        print("aspect shape:", aspect.shape)  # 3599, 3599
        print("aspect:", aspect)
        return slope, aspect
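    # A worked example of the slope formula above (the gradient values are
    # illustrative, not taken from this repository): with dx = 0.3 and
    # dy = 0.4, sqrt(dx*dx + dy*dy) = 0.5, so
    # slope = arctan(0.5) * 57.29578 ≈ 26.57 degrees,
    # where 57.29578 = 180/pi converts radians to degrees.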
|
||||
|
||||
def creat_twofile(self, dem_file_path, slope_out_path, aspect_out_path):
|
||||
"""
|
||||
生成坡度图、坡向图
|
||||
param: path_file1 为输入文件tif数据的文件路径
|
||||
|
||||
"""
|
||||
if os.path.isfile(dem_file_path):
|
||||
print("高程数据文件存在")
|
||||
else:
|
||||
print("高程数据文件不存在")
|
||||
|
||||
dataset_caijian = gdal.Open(dem_file_path)
|
||||
x_size = dataset_caijian.RasterXSize
|
||||
y_size = dataset_caijian.RasterYSize
|
||||
geo = dataset_caijian.GetGeoTransform()
|
||||
pro = dataset_caijian.GetProjection()
|
||||
array0 = dataset_caijian.ReadAsArray(0, 0, x_size, y_size)
|
||||
print("输出dem数据的数组", array0)
|
||||
zbc = self.add_round(array0)
|
||||
sx, sy = self.cal_dxdy(zbc, 30)
|
||||
slope, aspect = self.cal_slopasp(sx, sy)
|
||||
|
||||
driver = gdal.GetDriverByName("GTiff") # 创建一个数据格式
|
||||
driver.Register()
|
||||
newfile = driver.Create(slope_out_path, x_size, y_size, 1, gdal.GDT_Float32) # 存放路径文件名,长,宽,波段,数据类型
|
||||
newfile.SetProjection(pro)
|
||||
geo = [geo[0], geo[1], 0, geo[3], 0, -geo[1]]
|
||||
newfile.SetGeoTransform(geo)
|
||||
newfile.GetRasterBand(1).WriteArray(slope)
|
||||
|
||||
driver2 = gdal.GetDriverByName("GTiff") # 创建一个数据格式
|
||||
driver2.Register()
|
||||
newfile2 = driver2.Create(aspect_out_path, x_size, y_size, 1, gdal.GDT_Float32) # 存放路径文件名,长,宽,波段,数据类型
|
||||
geo = [geo[0], geo[1], 0, geo[3], 0, -geo[1]]
|
||||
newfile2.SetGeoTransform(geo)
|
||||
newfile2.GetRasterBand(1).WriteArray(aspect)
|
||||
|
||||
@staticmethod
|
||||
def resampling(input_file1, input_file2, ref_file, output_file, output_file2):
|
||||
"""
|
||||
采用gdal.Warp()方法进行重采样,差值法为双线性插值
|
||||
:param input_file1 slope path
|
||||
:param input_file2 aspect path
|
||||
:param ref_file: 参考图像路径
|
||||
:param output_file: slope path
|
||||
:param output_file2 aspect path
|
||||
:return:
|
||||
"""
|
||||
gdal.AllRegister()
|
||||
in_ds1 = gdal.Open(input_file1)
|
||||
in_ds2 = gdal.Open(input_file2)
|
||||
ref_ds = gdal.Open(ref_file, gdal.GA_ReadOnly)
|
||||
|
||||
# 获取输入影像信息
|
||||
input_file_proj = in_ds1.GetProjection()
|
||||
# inputefileTrans = in_ds1.GetGeoTransform()
|
||||
reference_file_proj = ref_ds.GetProjection()
|
||||
reference_file_trans = ref_ds.GetGeoTransform()
|
||||
|
||||
nbands = in_ds1.RasterCount
|
||||
bandinputfile1 = in_ds1.GetRasterBand(1)
|
||||
bandinputfile2 = in_ds2.GetRasterBand(1)
|
||||
x = ref_ds.RasterXSize
|
||||
y = ref_ds.RasterYSize
|
||||
|
||||
# 创建重采样输出文件(设置投影及六参数)
|
||||
driver1 = gdal.GetDriverByName('GTiff')
|
||||
output1 = driver1.Create(output_file, x, y, nbands, bandinputfile1.DataType)
|
||||
output1.SetGeoTransform(reference_file_trans)
|
||||
output1.SetProjection(reference_file_proj)
|
||||
# options = gdal.WarpOptions(srcSRS=inputProj, dstSRS=referencefileProj, resampleAlg=gdalconst.GRA_Bilinear)
|
||||
# resampleAlg = gdalconst.GRA_NearestNeighbour
|
||||
gdal.ReprojectImage(in_ds1, output1, input_file_proj, reference_file_proj, gdalconst.GRA_Bilinear)
|
||||
|
||||
driver2 = gdal.GetDriverByName('GTiff')
|
||||
output2 = driver2.Create(output_file2, x, y, nbands, bandinputfile2.DataType)
|
||||
output2.SetGeoTransform(reference_file_trans)
|
||||
output2.SetProjection(reference_file_proj)
|
||||
# options = gdal.WarpOptions(srcSRS=inputProj, dstSRS=referencefileProj, resampleAlg=gdalconst.GRA_Bilinear)
|
||||
# resampleAlg = gdalconst.GRA_NearestNeighbour
|
||||
gdal.ReprojectImage(in_ds2, output2, input_file_proj, reference_file_proj, gdalconst.GRA_Bilinear)
|
||||
|
||||
    @staticmethod
    def getorbitparameter(xml_path):
        """
        Read the pass direction and the lat/lon coordinates of the four image
        corners from the orbit parameter XML file.
        """
        # open the XML document and initialize the DOM from the path
        doc = minidom.parse(xml_path)
        # get the document element and initialize the root object
        root = doc.documentElement

        # pass direction: DEC for descending, ASC for ascending
        direction = root.getElementsByTagName("Direction")[0]
        pd = direction.firstChild.data

        imageinfo = root.getElementsByTagName("imageinfo")[0]
        # latitude and longitude of topLeft
        top_left = imageinfo.getElementsByTagName("topLeft")[0]
        latitude = top_left.getElementsByTagName("latitude")[0]
        longitude = top_left.getElementsByTagName("longitude")[0]
        tl_lat, tl_lon = latitude.firstChild.data, longitude.firstChild.data

        # latitude and longitude of topRight
        top_right = imageinfo.getElementsByTagName("topRight")[0]
        latitude = top_right.getElementsByTagName("latitude")[0]
        longitude = top_right.getElementsByTagName("longitude")[0]
        tr_lat, tr_lon = latitude.firstChild.data, longitude.firstChild.data

        # latitude and longitude of bottomLeft
        bottom_left = imageinfo.getElementsByTagName("bottomLeft")[0]
        latitude = bottom_left.getElementsByTagName("latitude")[0]
        longitude = bottom_left.getElementsByTagName("longitude")[0]
        bl_lat, bl_lon = latitude.firstChild.data, longitude.firstChild.data

        # latitude and longitude of bottomRight
        bottom_right = imageinfo.getElementsByTagName("bottomRight")[0]
        latitude = bottom_right.getElementsByTagName("latitude")[0]
        longitude = bottom_right.getElementsByTagName("longitude")[0]
        br_lat, br_lon = latitude.firstChild.data, longitude.firstChild.data
        print("pd,tl_lat,tl_lon,tr_lat,tr_lon,bl_lat,bl_lon,br_lat,br_lon", pd, tl_lat, tl_lon, tr_lat, tr_lon, bl_lat,
              bl_lon, br_lat, br_lon)
        return pd, tl_lat, tl_lon, tr_lat, tr_lon, bl_lat, bl_lon, br_lat, br_lon

    def get_rparademeter(self, xml_path):
        """
        Compute the radar line-of-sight azimuth angle R.
        """
        pd, tl_lat, tl_lon, tr_lat, tr_lon, bl_lat, bl_lon, br_lat, br_lon = self.getorbitparameter(xml_path)

        tl_lat = float(tl_lat)  # the values are decimal strings, so int() would fail; use float
        tl_lon = float(tl_lon)
        bl_lat = float(bl_lat)
        bl_lon = float(bl_lon)

        if pd == "DEC":
            # descending pass
            b = np.arctan((tl_lat - bl_lat) / (tl_lon - bl_lon)) * 57.29578  # 57.29578 degrees per radian
            r = 270 + b
            return r
        if pd == "ASC":
            # ascending pass
            b = np.arctan((tl_lat - bl_lat) / (tl_lon - bl_lon)) * 57.29578
            return b

    def clau(self, pathfile1, pathfile2, pathfile3, xml_path, save_localangle_path):
        """
        Compute the local incidence angle.
        :param pathfile1: slope raster path
        :param pathfile2: aspect raster path
        :param pathfile3: incidence-angle raster path
        :param xml_path: orbit parameter file
        r is the radar line-of-sight azimuth angle
        """
        r = self.get_rparademeter(xml_path)
        pd, tl_lat, tl_lon, tr_lat, tr_lon, bl_lat, bl_lon, br_lat, br_lon = self.getorbitparameter(xml_path)
        print("pass direction:", pd)
        dataset = gdal.Open(pathfile1)
        x = dataset.RasterXSize
        y = dataset.RasterYSize
        print("slope columns and rows:", x, y)
        slope_array = dataset.ReadAsArray(0, 0, x, y)

        dataset2 = gdal.Open(pathfile2)
        x2 = dataset2.RasterXSize
        y2 = dataset2.RasterYSize
        print("aspect columns and rows:", x2, y2)
        aspect_array = dataset2.ReadAsArray(0, 0, x2, y2)

        dataset3 = gdal.Open(pathfile3)
        x3 = dataset3.RasterXSize
        y3 = dataset3.RasterYSize
        geo3 = dataset3.GetGeoTransform()
        pro3 = dataset3.GetProjection()
        print("incidence-angle file columns and rows:", x3, y3)

        rushe_array = dataset3.ReadAsArray(0, 0, x3, y3)
        radina_value = 0
        if pd == "DEC":
            # descending pass
            where_0 = np.where(rushe_array == 0)

            # per-pixel angle between the look azimuth R and the aspect
            diff = r - aspect_array
            # look azimuth minus aspect strictly between 90 and 270 degrees
            b1 = np.where((diff > 90) & (diff < 270), 1, 0)
            c1 = np.cos(rushe_array*(math.pi/180)) * np.cos(slope_array*(math.pi/180)) - np.sin(slope_array*(math.pi/180)) * np.sin(
                rushe_array*(math.pi/180)) * np.cos(diff*(math.pi/180))
            d1 = b1 * c1
            # look azimuth minus aspect exactly 90 or 270 degrees
            b2 = np.where((diff == 90) | (diff == 270), 1, 0)
            d2 = b2 * c1
            # remaining pixels: difference in 0-90 or 270-360 degrees
            b3 = 1 - b1 - b2
            c3 = np.cos(rushe_array*(math.pi/180)) * np.cos(slope_array*(math.pi/180)) + np.sin(
                slope_array*(math.pi/180)) * np.sin(rushe_array*(math.pi/180)) * np.cos(diff*(math.pi/180))
            d3 = b3 * c3
            del b1, b2, b3, c3, c1
            gc.collect()
            radina_value = d1 + d2 + d3
            radina_value[where_0] = 0
            del d1, d2, d3
            gc.collect()
        if pd == "ASC":
            # ascending pass
            where_0 = np.where(rushe_array == 0)

            # aspect minus look azimuth strictly between 90 and 270 degrees
            # (cosine is even, so cos(diff) equals cos(R - aspect))
            diff = aspect_array - r
            b1 = np.where((diff > 90) & (diff < 270), 1, 0)
            c1 = np.cos(rushe_array*(math.pi/180)) * np.cos(slope_array*(math.pi/180)) + np.sin(
                slope_array*(math.pi/180)) * np.sin(rushe_array*(math.pi/180)) * np.cos(diff*(math.pi/180))
            d1 = b1 * c1
            # aspect minus look azimuth exactly 90 or 270 degrees
            b2 = np.where((diff == 90) | (diff == 270), 1, 0)
            d2 = b2 * c1
            # remaining pixels: difference in 0-90 or 270-360 degrees
            b3 = 1 - b1 - b2
            c3 = np.cos(rushe_array*(math.pi/180)) * np.cos(slope_array*(math.pi/180)) - np.sin(slope_array*(math.pi/180)) *\
                np.sin(rushe_array*(math.pi/180)) * np.cos(diff*(math.pi/180))
            d3 = b3 * c3
            radina_value = d1 + d2 + d3
            radina_value[where_0] = 0
            del b1, b2, b3, c3, c1, d1, d2, d3
            gc.collect()
        jubu_o = 57.29578 * np.arccos(radina_value)
        print("local incidence angle:", jubu_o)
        driver = gdal.GetDriverByName("GTiff")  # create the output format driver
        driver.Register()
        # output path, width, height, band count, data type
        newfile = driver.Create(save_localangle_path, x3, y3, 1, gdal.GDT_Float32)
        newfile.SetProjection(pro3)
        newfile.SetGeoTransform(geo3)
        newfile.GetRasterBand(1).WriteArray(jubu_o)
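For reference, both branches above evaluate the standard local-incidence-angle relation, with theta the radar incidence angle, s the slope, R the look azimuth and A the aspect; the sign flips with the quadrant of the azimuth difference, and the result is converted to degrees by the 57.29578 factor:

$$\cos\theta_{loc}=\cos\theta\,\cos s\ \pm\ \sin\theta\,\sin s\,\cos(R-A),\qquad \theta_{loc}=\frac{180}{\pi}\arccos(\cos\theta_{loc})$$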
    def localangle(self, dem_path, incidence_angle_path, orbital_parameters_path):
        """
        Collect the input file paths,
        compute the slope and aspect rasters,
        then compute the local incidence angle.
        """
        para_names = ["Dem", "IncidenceAngle", "OrbitalParameters", "经验A"]
        if len(para_names) == 0:
            return False

        # set the slope/aspect output paths and write the slope and aspect rasters
        slope_out_path = r"D:\MicroWorkspace\LeafAreaIndex\Temporary\UnClipslope.tif"
        aspect_out_path = r"D:\MicroWorkspace\LeafAreaIndex\Temporary\UnClipaspect.tif"
        print("slope_out_path:", slope_out_path)
        print("aspect_out_path:", aspect_out_path)
        self.creat_twofile(dem_path, slope_out_path, aspect_out_path)
        # clip and resample the slope/aspect rasters against the incidence-angle file
        slope_out_path2 = r"D:\MicroWorkspace\LocaLangle\Temporary\Clipslope.tif"
        aspect_out_path2 = r"D:\MicroWorkspace\LocaLangle\Temporary\Clipaspect.tif"
        self.resampling(slope_out_path, aspect_out_path, incidence_angle_path, slope_out_path2, aspect_out_path2)

        # write the local incidence angle file
        save_localangle_path = r"D:\MicroWorkspace\LocaLangle\Temporary\localangle.tif"
        self.clau(slope_out_path2, aspect_out_path2, incidence_angle_path,
                  orbital_parameters_path, save_localangle_path)
# if __name__ == '__main__':
#     calu_incident = CalculateIncident()
#     Dem_path = "D:\\MicroWorkspace\\LocaLangle\\Input\\dem.tif"
#     IncidenceAngle_path = "D:\\MicroWorkspace\\LocaLangle\\Input\\RSJ.tif"
#     OrbitalParameters_path = "D:\\MicroWorkspace\\LocaLangle\\Input\\" \
#                              "GF3_KAS_FSII_020008_E113.2_N23.1_20200528_L1A_HHHV_L10004829485.meta.xml"
#     calu_incident.localangle(Dem_path, IncidenceAngle_path, OrbitalParameters_path)
#     print('done')
File diff suppressed because it is too large
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,7 +0,0 @@
2023-02-17 06:06:22,434 - pyproj - DEBUG - PROJ_ERROR: proj_context_set_database_path: /cygdrive/e/soft/Anaconda/envs/micro/Library/share/proj/proj.db lacks DATABASE.LAYOUT.VERSION.MAJOR / DATABASE.LAYOUT.VERSION.MINOR metadata. It comes from another PROJ installation.
2023-02-17 06:13:33,366 - pyproj - DEBUG - PROJ_ERROR: proj_context_set_database_path: /cygdrive/e/soft/Anaconda/envs/micro/Library/share/proj/proj.db lacks DATABASE.LAYOUT.VERSION.MAJOR / DATABASE.LAYOUT.VERSION.MINOR metadata. It comes from another PROJ installation.
2023-02-17 06:43:15,623 - pyproj - DEBUG - PROJ_ERROR: proj_context_set_database_path: /cygdrive/e/soft/Anaconda/envs/micro/Library/share/proj/proj.db lacks DATABASE.LAYOUT.VERSION.MAJOR / DATABASE.LAYOUT.VERSION.MINOR metadata. It comes from another PROJ installation.
2023-02-17 06:46:12,386 - pyproj - DEBUG - PROJ_ERROR: proj_context_set_database_path: /cygdrive/e/soft/Anaconda/envs/micro/Library/share/proj/proj.db lacks DATABASE.LAYOUT.VERSION.MAJOR / DATABASE.LAYOUT.VERSION.MINOR metadata. It comes from another PROJ installation.
2023-02-17 07:58:59,262 - pyproj - DEBUG - PROJ_ERROR: proj_context_set_database_path: /cygdrive/e/soft/Anaconda/envs/micro/Library/share/proj/proj.db lacks DATABASE.LAYOUT.VERSION.MAJOR / DATABASE.LAYOUT.VERSION.MINOR metadata. It comes from another PROJ installation.
2023-02-17 07:59:05,563 - pyproj - DEBUG - PROJ_ERROR: proj_context_set_database_path: /cygdrive/e/soft/Anaconda/envs/micro/Library/share/proj/proj.db lacks DATABASE.LAYOUT.VERSION.MAJOR / DATABASE.LAYOUT.VERSION.MINOR metadata. It comes from another PROJ installation.
2023-02-17 07:59:28,169 - pyproj - DEBUG - PROJ_ERROR: proj_context_set_database_path: /cygdrive/e/soft/Anaconda/envs/micro/Library/share/proj/proj.db lacks DATABASE.LAYOUT.VERSION.MAJOR / DATABASE.LAYOUT.VERSION.MINOR metadata. It comes from another PROJ installation.
@ -1,302 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project:__init__.py
@File:lee_filter.py
@Function:lee_filter
@Contact: https://github.com/PyRadar/pyradar
@Author:SHJ
@Date:2021/8/30 8:42
@Version:1.0.0
"""

import os
import math
import logging
import shutil
import multiprocessing
import numpy as np
from PIL import Image
from tool.algorithm.block.blockprocess import BlockProcess
from tool.algorithm.image.ImageHandle import ImageHandler
from tool.file.fileHandle import fileHandle
from tool.algorithm.algtools.filter import lee_Filter_c as lee_Filter_c

logger = logging.getLogger("mylog")
file = fileHandle(False)
COEF_VAR_DEFAULT = 0.01
CU_DEFAULT = 0.25


class Filter:
    def __init__(self):
        pass

    @staticmethod
    def assert_window_size(win_size):
        """
        Asserts invalid window size.
        Window size must be odd and bigger than 3.
        """
        assert win_size >= 3, 'ERROR: win size must be at least 3'

        if win_size % 2 == 0:
            print('It is highly recommended to use odd window sizes. '
                  'You provided %s, an even number.' % (win_size, ))

    @staticmethod
    def assert_indices_in_range(width, height, xleft, xright, yup, ydown):
        """
        Asserts index out of image range.
        """
        assert 0 <= xleft <= width, \
            "index xleft:%s out of range (%s<= xleft < %s)" % (xleft, 0, width)
        assert 0 <= xright <= width, "index xright:%s out of range (%s<= xright < %s)" % (xright, 0, width)
        assert 0 <= yup <= height, "index yup:%s out of range. (%s<= yup < %s)" % (yup, 0, height)
        assert 0 <= ydown <= height, "index ydown:%s out of range. (%s<= ydown < %s)" % (ydown, 0, height)

    @staticmethod
    def weighting(window, cu=CU_DEFAULT):
        """
        Computes the weighting function for Lee filter using cu as the noise
        coefficient.
        """
        # cu is the noise variation coefficient
        two_cu = cu * cu

        # ci is the variation coefficient in the window
        window_mean = window.mean()
        window_std = window.std()
        ci = window_std / window_mean

        two_ci = ci * ci

        if not two_ci:  # dirty patch to avoid zero division
            two_ci = COEF_VAR_DEFAULT

        if cu > ci:
            w_t = 0.0
        else:
            w_t = 1.0 - (two_cu / two_ci)

        return w_t

    def lee_filter(self, in_path, out_path, win_size):
        """
        Apply lee to a numpy matrix containing the image, with a window of
        win_size x win_size.
        """
        self.assert_window_size(win_size)
        array1 = Image.open(in_path)
        img = np.array(array1)
        # we process the entire img as float64 to avoid type overflow error
        img = np.float64(img)
        img_filtered = np.zeros_like(img)
        self.lee_filter_array(img, img_filtered, win_size)
        out_image = Image.fromarray(img_filtered)
        out_image.save(out_path)
        print("lee_filter finish! path:" + out_path)
        return True

    @staticmethod
    def lee_filter_array(in_arry, out_arry, win_size):
        """
        Apply lee to a numpy matrix containing the image, with a window of
        win_size x win_size.
        """
        f = Filter()
        f.assert_window_size(win_size)
        img = in_arry
        # we process the entire img as float64 to avoid type overflow error
        img = np.float64(img)
        # offset the data so the window means stay away from zero
        img = img + 100

        # lee_filter_array(np.ndarray[double,ndim=2] img, np.ndarray[double,ndim=2] out_arry, int win_size)
        newOUt = lee_Filter_c.lee_filter_array(img, out_arry, win_size)
        newOUt = newOUt - 100
        out_arry[:, :] = newOUt[:, :]

    def lee_filter_multiprocess(self, in_paths, out_paths, win_size=3, processes_num=10):
        if len(in_paths) != len(out_paths):
            return False
        # filter the images in a worker pool
        pool = multiprocessing.Pool(processes=processes_num)
        pl = []
        for i in range(len(in_paths)):
            pl.append(pool.apply_async(self.lee_filter, (in_paths[i], out_paths[i], win_size)))
            print("lee_filter running! path:" + in_paths[i])
        pool.close()
        pool.join()
        return True

    def lee_filter_block_multiprocess(self, in_path, out_path, win_size=3):
        in_name = os.path.basename(in_path)
        out_name = os.path.basename(out_path)
        outDir = os.path.split(out_path)[0]
        # create the working folders
        src_path = os.path.join(outDir, "src_img")
        block_path = os.path.join(outDir, "block")
        block_filtered = os.path.join(outDir, "block_filtered")
        file.creat_dirs([src_path, block_path, block_filtered])

        shutil.copyfile(in_path, os.path.join(src_path, in_name))
        cols = ImageHandler.get_img_width(in_path)
        rows = ImageHandler.get_img_height(in_path)
        # split into blocks
        bp = BlockProcess()
        block_size = bp.get_block_size(rows, cols)
        bp.cut(src_path, block_path, ['tif', 'tiff'], 'tif', block_size)
        logger.info('blocking tifs success!')

        img_dir, img_name = bp.get_file_names(block_path, ['tif'])
        dir_dict = bp.get_same_img(img_dir, img_name)

        img_path_list = [value for value in dir_dict.values()][0]

        processes_num = min([len(img_path_list), multiprocessing.cpu_count() - 1])

        out_img_path_list = []
        for block_img_path in img_path_list:  # do not shadow the in_path parameter here
            suffix = bp.get_suffix(os.path.basename(block_img_path))
            filtered_block_path = os.path.join(block_filtered, out_name.replace('.tif', suffix))
            out_img_path_list.append(filtered_block_path)

        self.lee_filter_multiprocess(img_path_list, out_img_path_list, win_size=win_size, processes_num=processes_num)
        # merge the filtered blocks
        bp.combine(block_filtered, cols, rows, outDir, file_type=['tif'], datetype='float32')

        file.del_folder(src_path)
        file.del_folder(block_path)
        file.del_folder(block_filtered)

    def lee_process_sar(self, in_sar, out_sar, win_size, noise_var):
        '''
        # std::cout << "mode 12"
        # std::cout << "SIMOrthoProgram.exe 12 in_sar_path out_sar_path win_size noise_var"
        '''
        exe_path = r".\baseTool\x64\Release\SIMOrthoProgram-L-SAR.exe"
        exe_cmd = r"set PROJ_LIB=.\baseTool\x64\Release; & {0} {1} {2} {3} {4} {5}".format(exe_path, 12, in_sar,
                                                                                           out_sar, win_size, noise_var)
        print(exe_cmd)
        print(os.system(exe_cmd))
        print("==========================================================================")


if __name__ == '__main__':
    # Example 1:
    # path = r"I:\MicroWorkspace\product\C-SAR\LeafAreaIndex\Temporary\cai_sartif\HV_0_512_0_512.tif"
    # f = Filter()
    # f.lee_filter(path, path, 3)
    # Example 2:

    f = Filter()
    f.lee_filter_block_multiprocess(r'I:\preprocessed\HH.tif', r'I:\preprocessed\HHf.tif')
    pass
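The weighting method and the filtering loop above implement the classic Lee estimator. With C_u the noise variation coefficient (CU_DEFAULT = 0.25 here) and C_x the local variation coefficient of the window (std/mean), each pixel x is replaced by a blend of itself and the window mean:

$$w=\begin{cases}0, & C_u > C_x\\[4pt] 1-\dfrac{C_u^2}{C_x^2}, & \text{otherwise}\end{cases}\qquad \hat{x}_{i,j}=w\,x_{i,j}+(1-w)\,\bar{x}_{\text{win}}$$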
@ -1,124 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project:__init__.py
@File:lee_filter.py
@Function:lee_filter
@Contact: https://github.com/PyRadar/pyradar
@Author:SHJ
@Date:2021/8/30 8:42
@Version:1.0.0
"""

import os
cimport cython  # required for the Cython directives
import numpy as np  # both the Python-level and the C-level numpy must be declared
cimport numpy as np
from libc.math cimport pi
from libc.math cimport atan as math_atan
from libc.math cimport log10 as math_log10
from libc.math cimport log as math_log
from libc.math cimport floor as math_floor
from libc.math cimport sqrt as math_sqrt
from libc.math cimport exp as math_exp
from libc.math cimport sin as math_sin
from libc.math cimport cos as math_cos
from libc.math cimport tan as math_tan
from libc.math cimport asin as math_asin
from libc.math cimport acos as math_acos
from libc.math cimport sinh as math_sinh
from libc.math cimport cosh as math_cosh
from libc.math cimport tanh as math_tanh
from libc.math cimport ceil as math_ceil
from libc.math cimport lround as math_round

cdef double COEF_VAR_DEFAULT = 0.01
cdef double CU_DEFAULT = 0.25


cdef int ceil_usr(double v):
    return int(math_ceil(v))


cdef double weighting(np.ndarray[double,ndim=2] window, double cu):
    """
    Computes the weighting function for Lee filter using cu as the noise
    coefficient.
    """
    # cu is the noise variation coefficient
    cdef double two_cu = cu * cu

    # ci is the variation coefficient in the window
    cdef double window_mean = window.mean()
    cdef double window_std = window.std()
    cdef double ci = window_std / window_mean

    cdef double two_ci = ci * ci
    cdef double w_t = 0
    if two_ci == 0:  # dirty patch to avoid zero division
        two_ci = COEF_VAR_DEFAULT

    if cu > ci:
        w_t = 0.0
    else:
        w_t = 1.0 - (two_cu / two_ci)

    return w_t


cpdef np.ndarray[double,ndim=2] lee_filter_array(np.ndarray[double,ndim=2] img, np.ndarray[double,ndim=2] out_arry, int win_size):
    """
    Apply lee to a numpy matrix containing the image, with a window of
    win_size x win_size.
    """
    # the caller already promoted img to float64 to avoid type overflow errors
    cdef double cu = CU_DEFAULT
    cdef int i = 0
    cdef int j = 0
    cdef int xleft = 0
    cdef int xright = 0
    cdef int yup = 0
    cdef int ydown = 0
    cdef np.ndarray[double,ndim=2] window
    cdef double w_t = 0
    cdef double window_mean = 0
    cdef double new_pix_value = 0
    cdef double pix_value = 0
    cdef int n = img.shape[0]
    cdef int m = img.shape[1]
    cdef int win_offset = int(win_size / 2)

    while i < n:
        xleft = ceil_usr(i - win_offset)
        xright = int(i + win_offset)
        if xleft < 0:
            xleft = 0
        if xright >= n:
            xright = n
        j = 0
        while j < m:
            yup = ceil_usr(j - win_offset)
            ydown = int(j + win_offset)
            if yup < 0:
                yup = 0
            if ydown >= m:
                ydown = m

            pix_value = img[i, j]
            window = img[xleft:xright+1, yup:ydown+1]
            w_t = weighting(window, cu)
            window_mean = np.mean(window)
            new_pix_value = (pix_value * w_t) + (window_mean * (1.0 - w_t))

            if not new_pix_value > 0:
                new_pix_value = 0
            # keep five decimal places
            out_arry[i, j] = round(new_pix_value * 100000.0) / 100000.0
            j = j + 1
        i = i + 1
    return out_arry
Binary file not shown.
@ -1,45 +0,0 @@
from setuptools import setup
from setuptools.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy
from pathlib import Path
import shutil


class MyBuildExt(build_ext):
    def run(self):
        build_ext.run(self)

        build_dir = Path(self.build_lib)
        root_dir = Path(__file__).parent
        target_dir = build_dir if not self.inplace else root_dir

        self.copy_file(Path('./lee_Filter') / '__init__.py', root_dir, target_dir)
        self.copy_file(Path('.') / '__init__.py', root_dir, target_dir)

    def copy_file(self, path, source_dir, destination_dir):
        if not (source_dir / path).exists():
            return
        shutil.copyfile(str(source_dir / path), str(destination_dir / path))


setup(
    name="MyModule",
    ext_modules=cythonize(
        [
            Extension("pkg2.*", ["./lee_Filter/lee_Filter_c.pyx"]),
        ],
        build_dir="build",
        compiler_directives=dict(
            always_allow_keywords=True
        )),
    cmdclass=dict(
        build_ext=MyBuildExt
    ),
    packages=[],
    include_dirs=[numpy.get_include()],
)

# command: python setup.py build_ext --inplace
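A minimal sketch of building and calling the compiled filter. The import path follows the one used in lee_filter.py above; the array values and the +100/-100 offset mirror Filter.lee_filter_array, and are illustrative only:

import numpy as np
from tool.algorithm.algtools.filter import lee_Filter_c

# build first with: python setup.py build_ext --inplace
img = np.random.rand(64, 64).astype(np.float64) * 255 + 100  # offset as in Filter.lee_filter_array
out = np.zeros_like(img)
lee_Filter_c.lee_filter_array(img, out, 3)  # 3x3 window, written into `out`
out -= 100  # undo the offset, as the wrapper does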
@ -1,106 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project :microproduct
@File :logHandler.py
@Function :log checking and creation
@Author :SHJ
@Date :2021/12/1
@Version :1.0.0
"""
import logging
import os
import time
import datetime

import colorlog


class LogHandler:
    """
    Create run logs.
    """
    __logger = logging.getLogger("mylog")
    __format_str = logging.Formatter("[%(asctime)s] [%(process)d] [%(levelname)s] - %(module)s.%(funcName)s "
                                     "(%(filename)s:%(lineno)d) - %(message)s")
    __log_path = None

    __log_colors_config = {
        'DEBUG': 'blue',
        'INFO': 'cyan',
        'WARNING': 'yellow',
        'ERROR': 'red',
        'CRITICAL': 'red',
    }

    @staticmethod
    def init_log_handler(log_name):
        """
        Initialize the logger.
        :param log_name: path and name prefix of the log file
        :return:
        """
        path = os.getcwd()
        current_time = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(time.time()))
        LogHandler.__log_path = os.path.join(path, log_name + current_time + ".log")
        para_dir = os.path.split(LogHandler.__log_path)
        if not os.path.exists(para_dir[0]):
            os.makedirs(para_dir[0])
        # delete files older than seven days
        LogHandler.delete_outdate_files(para_dir[0])

        # plain file log plus a colored console handler
        date_format = "%m/%d/%Y %H:%M:%S"
        formatter = colorlog.ColoredFormatter(
            "%(log_color)s[%(asctime)s] [%(process)d] [%(levelname)s]- %(message)s ---from: %(module)s.%(funcName)s"
            " (%(filename)s:Line%(lineno)d) ",
            log_colors=LogHandler.__log_colors_config)
        fp = logging.FileHandler(LogHandler.__log_path, encoding='utf-8')
        fs = logging.StreamHandler()
        fs.setFormatter(formatter)
        logging.basicConfig(level=logging.INFO, datefmt=date_format, handlers=[fp, fs])

    @staticmethod
    def delete_outdate_files(path, date_interval=7):
        """
        Delete files under the directory created more than date_interval days ago.
        """
        current_time = time.strftime("%Y-%m-%d", time.localtime(time.time()))
        current_time_list = current_time.split("-")
        current_time_day = datetime.datetime(int(current_time_list[0]), int(current_time_list[1]),
                                             int(current_time_list[2]))
        for root, dirs, files in os.walk(path):
            for item in files:
                item_format = item.split(".", 2)
                if len(item_format) > 1 and item_format[1] == "log":  # guard against names without a dot
                    file_path = os.path.join(root, item)
                    create_time = time.strftime("%Y-%m-%d", time.localtime((os.stat(file_path)).st_mtime))
                    create_time_list = create_time.split("-")
                    create_time_day = datetime.datetime(int(create_time_list[0]), int(create_time_list[1]),
                                                        int(create_time_list[2]))
                    time_difference = (current_time_day - create_time_day).days
                    if time_difference > date_interval:
                        os.remove(file_path)


# if __name__ == "__main__":
#     # eg2:
#     log_handler = LogHandler()
#     log_handler.init_log_handler(r"run_log\myrun1")
#     logging.warning("1")
#     print("done")
File diff suppressed because it is too large
Binary file not shown.
Binary file not shown.
Binary file not shown.
Binary file not shown.
@ -1,90 +0,0 @@
# -*- coding: utf-8 -*-
"""
Created on Sun Mar 14 18:53:14 2021

@author: Dipankar
References
----------
Oh (2004): Quantitative retrieval of soil moisture content and surface roughness from multipolarized radar observations of bare soil surface. IEEE TGRS 42(3). 596-601.
"""

# ---------------------------------------------------------------------------------------
# Copyright (C) 2021 by Microwave Remote Sensing Lab, IITBombay http://www.mrslab.in

# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation; either version 3 of the License, or (at your option)
# any later version.
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
# more details.

# You should have received a copy of the GNU General Public License along
# with this program; if not, see http://www.gnu.org/licenses/
# ---------------------------------------------------------------------------------------


import numpy as np


## Description: Given sigma_0_vv, sigma_0_hh, and sigma_0_hv, the inverse
## model computes s, and mv

sigma0vvdB = -14.1
sigma0hhdB = -16.0
sigma0hvdB = -26.5
theta = 35.  # incidence angle (degrees)
f = 5.0  # GHz

k = 2*np.pi*f/0.3  # calculate the wave number (f in GHz, wavelength in meters)

theta_rad = theta*np.pi/180  # represent angle in radians

sigma_0_vv = np.power(10, (sigma0vvdB/10))  # represent data in linear scale
sigma_0_hh = np.power(10, (sigma0hhdB/10))
sigma_0_hv = np.power(10, (sigma0hvdB/10))

p = sigma_0_hh / sigma_0_vv  # calculate the p-ratio
q = sigma_0_hv / sigma_0_vv  # calculate the q-ratio

mv0 = np.arange(0.05, 0.5, 0.01)  # candidate soil-moisture values (fine increments)

## First estimates s1 and mv1
ks = ((-1)*3.125*np.log(1 - sigma_0_hv/(0.11 * mv0**0.7 * (np.cos(theta_rad))**2.2)))**0.556
err = (1 - (2.*theta_rad/np.pi)**(0.35*mv0**(-0.65)) * np.exp(-0.4 * ks**1.4))-p
abs_err = np.abs(err)
min_err = np.min(abs_err)  # find the value of minimum error
mv1 = mv0[np.where(abs_err == min_err)]
ks1 = ((-1)*3.125*np.log(1 - sigma_0_hv/(0.11 * mv1**0.7 * (np.cos(theta_rad))**2.2)))**0.556
s1 = ks1/k

## Second estimate s2 and mv2
ks2 = (np.log(1-(q/(0.095 * (0.13 + np.sin(1.5*theta_rad))**1.4))) / (-1.3))**(10./9.)
s2 = ks2/k

xx = (1-p)/np.exp(-0.4 * ks2**1.4)
if xx <= 0:
    mv2 = 0
else:
    yy = np.log(xx)/(0.35*np.log(2*theta_rad/np.pi))
    mv2 = yy**(-100/65)
    print(mv2, yy, np.power(yy, -100/65))
## Third estimate mv3
mv3 = ((sigma_0_hv/(1 - np.exp(-0.32 * ks2**1.8)))/(0.11 * np.cos(theta_rad)**2.2))**(1/0.7)

## weighted average s and mv-------------------------------------
sf = (s1 + 0.25*s2)/(1+0.25)
mvf = (mv1+mv2+mv3)/3
print(mv1, mv2, mv3, s1, s2)

print('Estimated rms height s (cm): ', sf*100)
print('Estimated volumetric soil moisture: ', mvf)
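The script encodes the Oh (2004) co- and cross-polarized ratios. With ks the normalized roughness and m_v the volumetric moisture, the first-estimate grid search above inverts the p relation, and ks2 is obtained by solving the q relation in closed form:

$$p=\frac{\sigma^0_{hh}}{\sigma^0_{vv}}=1-\left(\frac{2\theta}{\pi}\right)^{0.35\,m_v^{-0.65}}e^{-0.4\,(ks)^{1.4}},\qquad q=\frac{\sigma^0_{hv}}{\sigma^0_{vv}}=0.095\,(0.13+\sin 1.5\theta)^{1.4}\left(1-e^{-1.3\,(ks)^{0.9}}\right)$$

The third moisture estimate inverts the cross-polarized backscatter itself, $\sigma^0_{hv}=0.11\,m_v^{0.7}\cos^{2.2}\theta\,(1-e^{-0.32\,(ks)^{1.8}})$, which is exactly the mv3 line above.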
@ -1,128 +0,0 @@
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 4 14:59:54 2013

@author: Sat Kumar Tomer
@email: satkumartomer@gmail.com
@website: www.ambhas.com

"""
cimport cython  # required for the Cython directives
import numpy as np  # both the Python-level and the C-level numpy must be declared
cimport numpy as np
from libc.math cimport pi
from scipy.optimize import fmin

cpdef np.ndarray[double,ndim=1] inverse_oh2004(double sigma0vvdB, double sigma0hhdB, double sigma0hvdB, double theta, double f):
    """
    sigma0vvdB = -14.1 dB
    sigma0hhdB = -16.0
    sigma0hvdB = -26.5
    theta = 35.  # incidence angle in degrees
    f = 5.0  # GHz
    """
    cdef np.ndarray[double,ndim=1] result = np.ones((2))
    result[0] = np.nan
    result[1] = np.nan
    cdef double k = 2*3.1415926*f/0.299792458  # calculate the wave number
    cdef double theta_rad = theta*3.1415926/180  # represent angle in radians

    cdef double sigma_0_vv = np.power(10., (sigma0vvdB/10.))  # represent data in linear scale
    cdef double sigma_0_hh = np.power(10., (sigma0hhdB/10.))
    cdef double sigma_0_hv = np.power(10., (sigma0hvdB/10.))

    if sigma_0_vv == 0:
        return result
    cdef double p = sigma_0_hh / sigma_0_vv  # calculate the p-ratio
    cdef double q = sigma_0_hv / sigma_0_vv  # calculate the q-ratio

    cdef np.ndarray[double,ndim=1] mv0 = np.arange(0.05, 0.9, 0.01)  # candidate soil-moisture values (fine increments)

    ## First estimates s1 and mv1
    cdef np.ndarray[double,ndim=1] ks = ((-1.)*3.125*np.log(1 - sigma_0_hv/(0.11 * mv0**0.7 * (np.cos(theta_rad))**2.2)))**0.556
    cdef np.ndarray[double,ndim=1] err = (1. - (2.*theta_rad/np.pi)**(0.35*mv0**(-0.65)) * np.exp(-0.4 * ks**1.4))-p
    cdef np.ndarray[double,ndim=1] abs_err = np.abs(err)
    cdef double min_err = np.nanmin(abs_err)  # find the value of minimum error
    if np.isnan(min_err) or np.max(np.where(abs_err == min_err)[0].shape) == 0:  # NaN must be tested with np.isnan
        return result
    cdef double mv1 = mv0[np.where(abs_err == min_err)[0][0]]
    cdef double temp_ks1 = 1. - sigma_0_hv/(0.11 * mv1**0.7 * (np.cos(theta_rad))**2.2)
    if temp_ks1 < 0:
        return result
    cdef double ks1 = ((-1)*3.125*np.log(temp_ks1))**0.556
    cdef double s1 = ks1/k

    ## Second estimate s2 and mv2
    cdef double ks2 = (np.log(1-(q/(0.095 * (0.13 + np.sin(1.5*theta_rad))**1.4))) / (-1.3))**(10./9.)
    cdef double s2 = ks2/k
    cdef double mv2 = 0.
    cdef double yy = 0.
    cdef double xx = (1-p)/np.exp(-0.4 * ks2**1.4)
    if xx <= 0:
        mv2 = 0.
    else:
        yy = np.log(xx)/(0.35*np.log(2*theta_rad/np.pi))
        mv2 = np.power(yy, -100.0/65)

    ## Third estimate mv3
    cdef double mv3 = ((sigma_0_hv/(1 - np.exp(-0.32 * ks2**1.8)))/(0.11 * np.cos(theta_rad)**2.2))**(1/0.7)
    ## weighted average s and mv-------------------------------------
    cdef double sf = (s1 + 0.25*s2)/(1+0.25)
    cdef double mvf = (mv1+mv2+mv3)/3

    result[0] = mvf*1.0
    result[1] = sf*1.0
    return result

cpdef double lamda2freq(double lamda):
    return 299792458.0/lamda

cpdef double freq2lamda(double freq):
    return 299792458.0/freq

# double sigma0vvdB, double sigma0hhdB, double sigma0hvdB, double theta, double f
cpdef int retrieve_oh2004_main(int n, np.ndarray[double,ndim=1] mv, np.ndarray[double,ndim=1] h, np.ndarray[int,ndim=1] mask, np.ndarray[double,ndim=1] sigma0vvdB, np.ndarray[double,ndim=1] sigma0hhdB, np.ndarray[double,ndim=1] sigma0hvdB, np.ndarray[double,ndim=1] vh, np.ndarray[double,ndim=1] theta, double f):
    cdef int i = 0
    cdef np.ndarray[double,ndim=1] result
    while i < n:
        if mask[i] < 0.5:
            mv[i] = np.nan
            h[i] = np.nan
        else:
            result = inverse_oh2004(sigma0vvdB[i], sigma0hhdB[i], sigma0hvdB[i], theta[i], f)
            mv[i] = result[0]
            h[i] = result[1]
        i = i + 1
    return 1
@ -1,45 +0,0 @@
from setuptools import setup
from setuptools.extension import Extension
from Cython.Distutils import build_ext
from Cython.Build import cythonize
import numpy
from pathlib import Path
import shutil


class MyBuildExt(build_ext):
    def run(self):
        build_ext.run(self)

        build_dir = Path(self.build_lib)
        root_dir = Path(__file__).parent
        target_dir = build_dir if not self.inplace else root_dir

        self.copy_file(Path('./oh2004') / '__init__.py', root_dir, target_dir)
        self.copy_file(Path('.') / '__init__.py', root_dir, target_dir)

    def copy_file(self, path, source_dir, destination_dir):
        if not (source_dir / path).exists():
            return
        shutil.copyfile(str(source_dir / path), str(destination_dir / path))


setup(
    name="MyModule",
    ext_modules=cythonize(
        [
            Extension("pkg2.*", ["./oh2004/oh2004.pyx"]),
        ],
        build_dir="build",
        compiler_directives=dict(
            always_allow_keywords=True
        )),
    cmdclass=dict(
        build_ext=MyBuildExt
    ),
    packages=[],
    include_dirs=[numpy.get_include()],
)

# command: python setup.py build_ext --inplace
@ -1,26 +0,0 @@

import numpy as np
import oh2004

sigma0vvdB = -14.1
sigma0hhdB = -16.0
sigma0hvdB = -26.5
theta = 35.  # incidence angle (degrees)
f = 5.0  # GHz

n = 3
mask = np.ones((3))
mask[1] = 0
mask = mask.astype(np.int32)
sigma0hhdB = np.ones((3))*sigma0hhdB
sigma0vvdB = np.ones((3))*sigma0vvdB
sigma0hvdB = np.ones((3))*sigma0hvdB
theta = np.ones((3))*theta
mv = np.zeros(3)*1.0
h = np.zeros(3)*1.0
# the HV channel is passed twice: once as sigma0hvdB and once for the vh argument
oh2004.retrieve_oh2004_main(n, mv, h, mask, sigma0vvdB, sigma0hhdB, sigma0hvdB, sigma0hvdB, theta, f)
print(mask)
print(mv)
print(h)
Binary file not shown.
@ -1,92 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project:__init__.py
@File:sieve_filter.py
@Function:speckle (sieve) filtering based on GDAL
@Contact: 'https://www.osgeo.cn/gdal/api/gdal_alg.html?highlight=gdalsievefilter#'
'_CPPv415GDALSieveFilter15GDALRasterBandH15GDALRasterBandH15GDALRasterBandHiiPPc16GDALProgressFuncPv'
@Author:SHJ
@Date:2021/8/30 8:42
@Version:1.0.0
"""
import logging
from osgeo import gdal
from tool.algorithm.image.ImageHandle import ImageHandler
logger = logging.getLogger("mylog")


def gdal_sieve_filter_test(dst_filename, src_filename, threshold=100, connectedness=8):
    """
    Raster sieve filtering based on Python GDAL.
    :param dst_filename: output filtered image
    :param src_filename: input file to process
    :param threshold: sieve threshold (minimum raster polygon size, in pixels)
    :param connectedness: connectivity, 4 or 8
    :return:
    """
    # connectedness 4: diagonal pixels are not considered directly adjacent for
    # polygon membership; 8: diagonal pixels are considered adjacent
    gdal.AllRegister()
    dataset = gdal.Open(src_filename, gdal.GA_Update)
    if dataset is None:
        logger.error('{} open tif fail!'.format(src_filename))
        return False
    # get the source band to process
    src_band = dataset.GetRasterBand(1)
    mask_band = src_band.GetMaskBand()
    dst_band = src_band
    prog_func = gdal.TermProgress_nocb
    # run the GDAL sieve filter
    result = gdal.SieveFilter(src_band, mask_band, dst_band, threshold, connectedness, callback=prog_func)
    if result != 0:
        return False
    proj = dataset.GetProjection()
    geotransform = dataset.GetGeoTransform()
    dst_array = dst_band.ReadAsArray(0, 0, dst_band.XSize, dst_band.YSize)
    ImageHandler.write_img(dst_filename, proj, geotransform, dst_array)
    del dataset
    return True


def gdal_sieve_filter(dst_filename, src_filename, threshold=2, connectedness=4):
    """
    Raster sieve filtering based on Python GDAL.
    :param dst_filename: output filtered image
    :param src_filename: input file to process
    :param threshold: sieve threshold (minimum raster polygon size, in pixels)
    :param connectedness: connectivity, 4 or 8
    :return:
    """
    gdal.AllRegister()
    dataset = gdal.Open(src_filename, gdal.GA_Update)
    if dataset is None:
        logger.error('{} open tif fail!'.format(src_filename))
        return False
    # get the source band to process
    src_band = dataset.GetRasterBand(1)
    # the sieve filter only handles integer classes, so scale the data up first
    src_array = src_band.ReadAsArray(0, 0, src_band.XSize, src_band.YSize)
    src_array = src_array * 1000
    src_band.WriteArray(src_array)
    mask_band = None
    dst_band = src_band
    prog_func = gdal.TermProgress_nocb
    # run the GDAL sieve filter
    result = gdal.SieveFilter(src_band, mask_band, dst_band, threshold, connectedness, callback=prog_func)
    if result != 0:
        return False
    proj = dataset.GetProjection()
    geotransform = dataset.GetGeoTransform()
    dst_array = dst_band.ReadAsArray(0, 0, dst_band.XSize, dst_band.YSize)
    dst_array = dst_array / 1000
    ImageHandler.write_img(dst_filename, proj, geotransform, dst_array)
    del dataset
    return True


if __name__ == '__main__':
    inputfile = r'D:\DATA\testdata\srcimg\GLCFCS30_E110N25.tif'
    outputfile = r'D:\DATA\testdata\srcimg\GLCFCS30_E110N25_sieve_filter.tif'
    flag = gdal_sieve_filter(outputfile, inputfile, threshold=100, connectedness=4)
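A small sketch of the integer-scaling workaround used in gdal_sieve_filter above (the array values are illustrative): the sieve operates on integer-valued classes, so float data are scaled up by 1000, filtered, then scaled back, preserving three decimal places.

import numpy as np

arr = np.array([[0.123, 0.125], [0.500, 0.500]], dtype=np.float32)
scaled = arr * 1000          # 123, 125, 500, 500: distinct integer-like classes
# ... write `scaled` into the band and run gdal.SieveFilter as in gdal_sieve_filter ...
restored = scaled / 1000     # back to the original scale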
@ -1,449 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project : microproduct
@File : blockprocess.py
@Function : block splitting and mosaicking for tif/tiff images
@Contact : https://blog.csdn.net/qq_38308388/article/details/102978755
@Author:SHJ
@Date:2021/9/6
@Version:1.0.0
"""
from osgeo import osr, gdal
import numpy as np
import os
from PIL import Image
from tool.algorithm.image.ImageHandle import ImageHandler


class BlockProcess:
    def __init__(self):
        pass

    @staticmethod
    def get_block_size(rows, cols):
        block_size = 512
        if rows > 2048 and cols > 2048:
            block_size = 1024
        return block_size

    @staticmethod
    def get_suffix(path_name):
        name = path_name
        suffix = '_' + name.split('_')[-4] + '_' + name.split('_')[-3] + '_' + name.split('_')[-2] + '_' + \
                 name.split('_')[-1]
        return suffix

    @staticmethod
    def get_file_names(data_dir, file_type=['tif', 'tiff']):
        """
        Collect the paths of all file_type files under data_dir.
        """
        result_dir = []
        result_name = []
        for maindir, subdir, file_name_list in os.walk(data_dir):
            for filename in file_name_list:
                apath = os.path.join(maindir, filename)
                ext = apath.split('.')[-1]
                if ext in file_type:
                    result_dir.append(apath)
                    result_name.append(filename)
        return result_dir, result_name

    @staticmethod
    def get_same_img(img_dir, img_name):
        """
        Group the block-image paths in img_dir by the base name taken from
        img_name, and return the groups as a dict.
        """
        result = {}
        for idx, name in enumerate(img_name):
            temp_name = ''
            for idx2, item in enumerate(name.split('_')[:-4]):
                if idx2 == 0:
                    temp_name = temp_name + item
                else:
                    temp_name = temp_name + '_' + item

            if temp_name in result:
                result[temp_name].append(img_dir[idx])
            else:
                result[temp_name] = []
                result[temp_name].append(img_dir[idx])
        return result

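A quick check of the block-naming convention that get_suffix and get_same_img rely on (the file name below is a hypothetical example): blocks are named <base>_<xstart>_<xend>_<ystart>_<yend>.<ext>.

name = "HH_preprocessed_0_1024_1024_2048.tif"
parts = name.split('_')
suffix = '_' + '_'.join(parts[-4:])   # what get_suffix returns: "_0_1024_1024_2048.tif"
base = '_'.join(parts[:-4])           # the grouping key built by get_same_img: "HH_preprocessed"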
    @staticmethod
    def assign_spatial_reference_byfile(src_path, dst_path):
        """
        Copy the georeferencing of src_path into the dst_path image.
        """
        src_ds = gdal.Open(src_path, gdal.GA_ReadOnly)
        if src_ds is None:
            return False
        sr = osr.SpatialReference()
        sr.ImportFromWkt(src_ds.GetProjectionRef())
        geo_transform = src_ds.GetGeoTransform()

        dst_ds = gdal.Open(dst_path, gdal.GA_Update)
        if dst_ds is None:
            return False
        dst_ds.SetProjection(sr.ExportToWkt())
        dst_ds.SetGeoTransform(geo_transform)
        del dst_ds
        del src_ds
        return True

    @staticmethod
    def assign_spatial_reference_bypoint(row_begin, col_begin, src_proj, src_geo, img_path):
        """
        Georeference a block image from the source projection and the block's
        row/column offset within the source image.
        """
        sr = osr.SpatialReference()
        sr.ImportFromWkt(src_proj)
        geo_transform = list(src_geo)  # copy so the caller's geotransform is not mutated
        geo_transform[0] = src_geo[0] + col_begin * src_geo[1] + row_begin * src_geo[2]
        geo_transform[3] = src_geo[3] + col_begin * src_geo[4] + row_begin * src_geo[5]
        dst_ds = gdal.Open(img_path, gdal.GA_Update)
        if dst_ds is None:
            return False
        dst_ds.SetProjection(sr.ExportToWkt())
        dst_ds.SetGeoTransform(geo_transform)
        del dst_ds
        return True

    @staticmethod
    def __get_band_array(filename, num):
        """
        :param filename: tif path
        :param num: band index
        :return: the array of the requested band
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        bands = dataset.GetRasterBand(num)
        array = bands.ReadAsArray(0, 0, bands.XSize, bands.YSize)
        del dataset
        return array

    @staticmethod
    def get_data(filename):
        """
        :param filename: tif path
        :return: the data of all bands
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        im_width = dataset.RasterXSize
        im_height = dataset.RasterYSize
        im_data = dataset.ReadAsArray(0, 0, im_width, im_height)
        del dataset
        return im_data

    def get_tif_dtype(self, filename):
        """
        :param filename: tif path
        :return: the tif data type name
        """
        image = self.__get_band_array(filename, 1)
        return image.dtype.name

    def cut(self, in_dir, out_dir, file_type=['tif', 'tiff'], out_type='tif', out_size=2048):
        """
        :param in_dir: folder holding the images to split (no need to point at a single tif)
        :param out_dir: folder for the split results
        :param file_type: input image types: tif, tiff, bmp, jpg, png, etc.
        :param out_type: output image type
        :param out_size: block size; blocks are n*n squares
        :return: True or False
        """
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        data_dir_list, _ = self.get_file_names(in_dir, file_type)
        count = 0

        for each_dir in data_dir_list:

            name_suffix = os.path.basename(each_dir)
            img_name = os.path.splitext(name_suffix)[0]

            # read with gdal
            image = self.__get_band_array(each_dir, 1)

            cut_factor_row = int(np.ceil(image.shape[0] / out_size))
            cut_factor_clo = int(np.ceil(image.shape[1] / out_size))
            for i in range(cut_factor_row):
                for j in range(cut_factor_clo):

                    # re-align the last block flush with the image edge
                    if i == cut_factor_row - 1:
                        i = image.shape[0] / out_size - 1
                    if j == cut_factor_clo - 1:
                        j = image.shape[1] / out_size - 1

                    start_x = int(np.rint(i * out_size))
                    start_y = int(np.rint(j * out_size))
                    end_x = int(np.rint((i + 1) * out_size))
                    end_y = int(np.rint((j + 1) * out_size))
                    out_dir_images = os.path.join(out_dir, img_name + '_' + str(start_x) + '_' + str(end_x) + '_' + str(start_y) + '_' + str(
                        end_y) + '.' + out_type)

                    data = ImageHandler.get_data(each_dir)
                    if ImageHandler.get_bands(each_dir) > 1:
                        temp_data = data[:, start_x:end_x, start_y:end_y]
                    else:
                        temp_data = data[start_x:end_x, start_y:end_y]
                    ImageHandler.write_img(out_dir_images, '', [0, 0, 0, 0, 0, 0], temp_data)
                    count += 1
        return True

    def cut_new(self, in_dir, out_dir, file_type=['tif', 'tiff'], out_type='tif', out_size=2048):
        """
        :param in_dir: folder holding the images to split (no need to point at a single tif)
        :param out_dir: folder for the split results
        :param file_type: input image types: tif, tiff, bmp, jpg, png, etc.
        :param out_type: output image type
        :param out_size: block size; blocks are n*n squares
        :return: True or False
        modified 2023-08-31 ----tjx
        """
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        data_dir_list, _ = self.get_file_names(in_dir, file_type)
        count = 0

        for each_dir in data_dir_list:

            name_suffix = os.path.basename(each_dir)
            img_name = os.path.splitext(name_suffix)[0]

            # read with gdal
            image = self.__get_band_array(each_dir, 1)

            block_x = int(np.ceil(image.shape[1] / out_size))
            block_y = int(np.ceil(image.shape[0] / out_size))  # revised blocking
            for i in range(block_y):
                for j in range(block_x):
                    start_x = j * out_size
                    start_y = i * out_size
                    end_x = image.shape[1] if (j + 1) * out_size > image.shape[1] else (j + 1) * out_size
                    end_y = image.shape[0] if (i + 1) * out_size > image.shape[0] else (i + 1) * out_size

                    out_dir_images = os.path.join(out_dir, img_name + '_' + str(start_x) + '_' + str(end_x) + '_' + str(start_y) + '_' + str(
                        end_y) + '.' + out_type)

                    data = ImageHandler.get_data(each_dir)
                    if ImageHandler.get_bands(each_dir) > 1:
                        temp_data = data[:, start_y:end_y, start_x:end_x]
                    else:
                        temp_data = data[start_y:end_y, start_x:end_x]
                    ImageHandler.write_img(out_dir_images, '', [0, 0, 0, 0, 0, 0], temp_data)
                    count += 1
        return True

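The revised blocking above clamps the last block to the image edge instead of re-aligning it as cut() does; a short sketch of the resulting ranges (sizes are illustrative):

import numpy as np

out_size, width = 1024, 2500
n_blocks = int(np.ceil(width / out_size))  # same ceil as in cut_new
ranges = [(j * out_size, min((j + 1) * out_size, width)) for j in range(n_blocks)]
print(ranges)  # [(0, 1024), (1024, 2048), (2048, 2500)]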
    def combine(self, data_dir, w, h, out_dir, out_type='tif', file_type=['tif', 'tiff'], datetype='float16'):
        """
        :param data_dir: folder holding the blocks to mosaic (no need to point at a single tif)
        :param w: width of the mosaicked image
        :param h: height of the mosaicked image
        :param out_dir: folder for the mosaic results
        :param out_type: output image type
        :param file_type: block image types
        :param datetype: data type: int8, int16, float16, float32, etc.
        :return: True or False
        """
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        img_dir, img_name = self.get_file_names(data_dir, file_type)

        dir_dict = self.get_same_img(img_dir, img_name)
        count = 0
        for key in dir_dict.keys():
            temp_label = np.zeros(shape=(h, w), dtype=datetype)
            dir_list = dir_dict[key]
            for item in dir_list:
                name_split = item.split('_')
                x_start = int(name_split[-4])
                x_end = int(name_split[-3])
                y_start = int(name_split[-2])
                y_end = int(name_split[-1].split('.')[0])
                img = ImageHandler.get_band_array(item, 1)
                img = np.array(img)

                temp_label[x_start:x_end, y_start:y_end] = img

            img_name = key + '.' + out_type
            new_out_dir = os.path.join(out_dir, img_name)
            ImageHandler.write_img(new_out_dir, '', [0, 0, 0, 0, 0, 0], temp_label)

            count += 1
        return True

    # TODO 20230901: keep this merge code in sync with the updated block-cutting code
    def combine_new(self, data_dir, w, h, out_dir, out_type='tif', file_type=['tif', 'tiff'], datetype='float16'):
        """
        :param data_dir: folder containing the image blocks to merge (no need to point at a single tif)
        :param w: width of the mosaicked image
        :param h: height of the mosaicked image
        :param out_dir: folder for the merged result
        :param out_type: file type of the merged result
        :param file_type: file types of the input blocks
        :param datetype: data type, e.g. int8, int16, float16, float32
        :return: True or False
        """
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        img_dir, img_name = self.get_file_names(data_dir, file_type)

        dir_dict = self.get_same_img(img_dir, img_name)
        count = 0
        for key in dir_dict.keys():
            dir_list = dir_dict[key]
            bands = ImageHandler.get_bands(dir_list[0])
            if bands > 1:
                temp_label = np.zeros(shape=(bands, h, w), dtype=datetype)
                for item in dir_list:
                    name_split = item.split('_')
                    x_start = int(name_split[-4])
                    x_end = int(name_split[-3])
                    y_start = int(name_split[-2])
                    y_end = int(name_split[-1].split('.')[0])
                    # read all bands of the block; reading only band 1 here would
                    # broadcast a single band across every output band
                    img = ImageHandler.get_data(item)

                    temp_label[:, y_start:y_end, x_start:x_end] = img

                img_name = key + '.' + out_type
                new_out_dir = os.path.join(out_dir, img_name)
                ImageHandler.write_img(new_out_dir, '', [0, 0, 0, 0, 0, 0], temp_label)
                count += 1
            else:
                temp_label = np.zeros(shape=(h, w), dtype=datetype)
                for item in dir_list:
                    name_split = item.split('_')
                    x_start = int(name_split[-4])
                    x_end = int(name_split[-3])
                    y_start = int(name_split[-2])
                    y_end = int(name_split[-1].split('.')[0])
                    img = ImageHandler.get_band_array(item, 1)
                    img = np.array(img)

                    temp_label[y_start:y_end, x_start:x_end] = img

                img_name = key + '.' + out_type
                new_out_dir = os.path.join(out_dir, img_name)
                ImageHandler.write_img(new_out_dir, '', [0, 0, 0, 0, 0, 0], temp_label)

                count += 1
        return True
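    # A minimal sketch of the block -> merge round trip, mirroring the commented-out
    # test at the end of this file; all paths here are hypothetical placeholders.
    #
    #     bp = BlockProcess()
    #     bp.cut_new(r"D:\data\features", r"D:\data\blocks", ['tif'], 'tif', 1024)
    #     w = ImageHandler.get_img_width(r"D:\data\features\HH.tif")
    #     h = ImageHandler.get_img_height(r"D:\data\features\HH.tif")
    #     bp.combine_new(r"D:\data\blocks", w, h, r"D:\data\merged",
    #                    out_type='tif', file_type=['tif'], datetype='float32')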
    def combine_Tif(self, data_dir, w, h, out_dir, proj, geo, out_type='tif', file_type=['tif', 'tiff'],
                    datetype='float16'):
        """
        Mosaic the tifs in a folder into one large tif.
        :param data_dir: folder containing the image blocks to merge (no need to point at a single tif)
        :param w: width of the mosaicked image
        :param h: height of the mosaicked image
        :param out_dir: folder for the merged result
        :param proj: projection to assign
        :param geo: geotransform parameters to assign
        :param out_type: file type of the merged result
        :param file_type: file types of the input blocks
        :param datetype: data type, e.g. int8, int16, float16, float32
        :return: True or False
        """
        image_handler = ImageHandler()
        if not os.path.exists(out_dir):
            os.makedirs(out_dir)
        img_dir, img_name = self.get_file_names(data_dir, file_type)

        dir_dict = self.get_same_img(img_dir, img_name)
        count = 0
        for key in dir_dict.keys():
            temp_label = np.zeros(shape=(h, w), dtype=datetype)
            dir_list = dir_dict[key]
            for item in dir_list:
                name_split = item.split('_')
                x_start = int(name_split[-4])
                x_end = int(name_split[-3])
                y_start = int(name_split[-2])
                y_end = int(name_split[-1].split('.')[0])
                img = image_handler.get_data(item)
                temp_label[x_start:x_end, y_start:y_end] = img

            img_name = key + '.' + out_type
            new_out_dir = os.path.join(out_dir, img_name)
            image_handler.write_img(new_out_dir, proj, geo, temp_label)
            count += 1
        return True
    # if __name__ == '__main__':
    #     bp = BlockProcess()
    #     # cut
    #     data_dir = r"D:\micro\WorkSpace\LandCover\Temporary\processing\feature_tif\cut"
    #     out_dir = r"D:\micro\WorkSpace\LandCover\Temporary\processing\feature_tif\combine"
    #     file_type = ['tif']
    #     out_type = 'tif'
    #     cut_size = 1024
    #     bp.cut_new(data_dir, out_dir, file_type, out_type, cut_size)
    #     # combine
    #     # data_dir = r"D:\Workspace\SoilMoisture\Temporary\test"
    #     w = 5043
    #     h = 1239
    #     out_dirs = r"D:\BaiduNetdiskDownload\HF\cut_outs"
    #     # out_type = 'tif'
    #     # file_type = ['tif']
    #     datetype = 'float'
    #     # src_path = r"D:\Workspace\SoilMoisture\Temporary\preprocessed\HH_preprocessed.tif"
    #     # datetype = bp.get_tif_dtype(src_path)
    #     bp.combine_new(out_dir, w, h, out_dirs, out_type, file_type, datetype)
    #
    #     # assign geolocation info
    #     new_out_dir = r"D:\DATA\testdata1\combine\TEST_20200429_NDVI.tif"
    #     bp.assign_spatial_reference_byfile(src_path, new_out_dir)

    #     fn = r'D:\Workspace\SoilMoisture\Temporary\combine\soil_moisture.tif'
    #     product_path = r'D:\Workspace\SoilMoisture\Temporary\combine\soil_moisture_1.tif'
    #
    #     proj, geos, img = ImageHandler.read_img(fn)
    #     img[img > 1] = 1
    #     img[img < 0] = 0
    #     ImageHandler.write_img(product_path, proj, geos, img)
@ -1,752 +0,0 @@
"""
|
||||
@Project :microproduct
|
||||
@File :ImageHandle.py
|
||||
@Function :实现对待处理SAR数据的读取、格式标准化和处理完后保存文件功能
|
||||
@Author :LMM
|
||||
@Date :2021/10/19 14:39
|
||||
@Version :1.0.0
|
||||
"""
|
||||
import os
|
||||
import time
|
||||
|
||||
from PIL import Image
|
||||
from osgeo import gdal
|
||||
from osgeo import osr
|
||||
import numpy as np
|
||||
from PIL import Image
|
||||
import cv2
|
||||
import logging
|
||||
|
||||
import math
|
||||
logger = logging.getLogger("mylog")
|
||||
|
||||
|
||||
class ImageHandler:
    """
    Read, edit and save images.
    """
    def __init__(self):
        pass

    @staticmethod
    def get_dataset(filename):
        """
        :param filename: tif path
        :return: image dataset handle
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        return dataset

    def get_scope(self, filename):
        """
        :param filename: tif path
        :return: image extent
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        im_scope = self.cal_img_scope(dataset)
        del dataset
        return im_scope

    @staticmethod
    def get_projection(filename):
        """
        :param filename: tif path
        :return: map projection info
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        im_proj = dataset.GetProjection()
        del dataset
        return im_proj

    @staticmethod
    def get_geotransform(filename):
        """
        :param filename: tif path
        :return: affine transform from image space (row, col) to georeferenced space
                 (projected or geographic coordinates)
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        geotransform = dataset.GetGeoTransform()
        del dataset
        return geotransform

    @staticmethod
    def get_invgeotransform(filename):  # staticmethod added: the function takes no self
        """
        :param filename: tif path
        :return: affine transform from georeferenced space (projected or geographic
                 coordinates) to image space (row, col)
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        geotransform = dataset.GetGeoTransform()
        geotransform = gdal.InvGeoTransform(geotransform)
        del dataset
        return geotransform

    @staticmethod
    def get_bands(filename):
        """
        :param filename: tif path
        :return: number of bands
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        bands = dataset.RasterCount
        del dataset
        return bands

    @staticmethod
    def geo2lonlat(dataset, x, y):
        """
        Convert projected coordinates to lon/lat (the projected CRS is taken from the dataset).
        :param dataset: GDAL dataset
        :param x: projected x coordinate
        :param y: projected y coordinate
        :return: (lon, lat) corresponding to the projected point (x, y)
        """
        prosrs = osr.SpatialReference()
        prosrs.ImportFromWkt(dataset.GetProjection())
        geosrs = prosrs.CloneGeogCS()
        ct = osr.CoordinateTransformation(prosrs, geosrs)
        coords = ct.TransformPoint(x, y)
        return coords[:2]

    @staticmethod
    def get_band_array(filename, num=1):
        """
        :param filename: tif path
        :param num: band index (1-based)
        :return: array of the requested band
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        bands = dataset.GetRasterBand(num)
        array = bands.ReadAsArray(0, 0, bands.XSize, bands.YSize)

        # if 'int' in str(array.dtype):
        #     array[np.where(array == -9999)] = np.inf
        # else:
        #     array[np.where(array < -9000.0)] = np.nan

        del dataset
        return array

    @staticmethod
    def get_data(filename):
        """
        :param filename: tif path
        :return: data of all bands
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        im_width = dataset.RasterXSize
        im_height = dataset.RasterYSize
        im_data = dataset.ReadAsArray(0, 0, im_width, im_height)
        del dataset
        return im_data

    @staticmethod
    def get_all_band_array(filename):
        """
        (atmospheric delay algorithm)
        Stack all bands of an ERA-5 image into one array with the band axis last:
        get_data() -> (37, 8, 8), get_all_band_array() -> (8, 8, 37)
        :param filename: image path
        :return: image array
        """
        dataset = gdal.Open(filename)
        x_size = dataset.RasterXSize
        y_size = dataset.RasterYSize
        nums = dataset.RasterCount
        array = np.zeros((y_size, x_size, nums), dtype=float)
        if nums == 1:
            bands_0 = dataset.GetRasterBand(1)
            array = bands_0.ReadAsArray(0, 0, x_size, y_size)
        else:
            for i in range(0, nums):
                bands = dataset.GetRasterBand(i + 1)
                arr = bands.ReadAsArray(0, 0, x_size, y_size)
                array[:, :, i] = arr
        return array

    @staticmethod
    def get_img_width(filename):
        """
        :param filename: tif path
        :return: image width
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        width = dataset.RasterXSize

        del dataset
        return width

    @staticmethod
    def get_img_height(filename):
        """
        :param filename: tif path
        :return: image height
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None
        height = dataset.RasterYSize
        del dataset
        return height

    @staticmethod
    def read_img(filename):
        """
        Read an image.
        :param filename: image path
        :return: (projection, geotransform, array)
        """
        gdal.AllRegister()
        img_dataset = gdal.Open(filename)  # open the file

        if img_dataset is None:
            msg = 'Could not open ' + filename
            logger.error(msg)
            return None, None, None

        im_proj = img_dataset.GetProjection()  # map projection info
        if im_proj is None:
            return None, None, None
        im_geotrans = img_dataset.GetGeoTransform()  # affine matrix

        im_width = img_dataset.RasterXSize   # number of columns
        im_height = img_dataset.RasterYSize  # number of rows
        im_arr = img_dataset.ReadAsArray(0, 0, im_width, im_height)
        del img_dataset
        return im_proj, im_geotrans, im_arr

    def cal_img_scope(self, dataset):
        """
        Compute the georeferenced extent of an image: the four image corners are converted
        to projected or geographic coordinates via GDAL's six-parameter model.
        :param dataset: GDAL dataset
        :return: list [point_upleft, point_upright, point_downleft, point_downright]
        """
        if dataset is None:
            return None

        img_geotrans = dataset.GetGeoTransform()
        if img_geotrans is None:
            return None

        width = dataset.RasterXSize   # number of columns
        height = dataset.RasterYSize  # number of rows

        point_upleft = self.trans_rowcol2geo(img_geotrans, 0, 0)
        point_upright = self.trans_rowcol2geo(img_geotrans, width, 0)
        point_downleft = self.trans_rowcol2geo(img_geotrans, 0, height)
        point_downright = self.trans_rowcol2geo(img_geotrans, width, height)

        return [point_upleft, point_upright, point_downleft, point_downright]

    @staticmethod
    def get_scope_ori_sim(filename):
        """
        Compute the geographic extent of a lon/lat lookup image (band 1: longitude,
        band 2: latitude) and return the four corner points in a fixed order.
        :param filename: image path
        :return: list [point_upleft, point_upright, point_downleft, point_downright]
        """
        gdal.AllRegister()
        dataset = gdal.Open(filename)
        if dataset is None:
            return None

        width = dataset.RasterXSize   # number of columns
        height = dataset.RasterYSize  # number of rows

        band1 = dataset.GetRasterBand(1)
        array1 = band1.ReadAsArray(0, 0, band1.XSize, band1.YSize)

        band2 = dataset.GetRasterBand(2)
        array2 = band2.ReadAsArray(0, 0, band2.XSize, band2.YSize)

        if array1[0, 0] < array1[0, width - 1]:
            point_upleft = [array1[0, 0], array2[0, 0]]
            point_upright = [array1[0, width - 1], array2[0, width - 1]]
        else:
            point_upright = [array1[0, 0], array2[0, 0]]
            point_upleft = [array1[0, width - 1], array2[0, width - 1]]

        if array1[height - 1, 0] < array1[height - 1, width - 1]:
            point_downleft = [array1[height - 1, 0], array2[height - 1, 0]]
            point_downright = [array1[height - 1, width - 1], array2[height - 1, width - 1]]
        else:
            point_downright = [array1[height - 1, 0], array2[height - 1, 0]]
            point_downleft = [array1[height - 1, width - 1], array2[height - 1, width - 1]]

        if array2[0, 0] < array2[height - 1, 0]:
            # swap top and bottom
            point_upleft, point_downleft = point_downleft, point_upleft
            point_upright, point_downright = point_downright, point_upright

        return [point_upleft, point_upright, point_downleft, point_downright]

    @staticmethod
    def trans_rowcol2geo(img_geotrans, img_col, img_row):
        """
        Convert image coordinates (row, col) to projected or geographic coordinates
        using GDAL's six-parameter affine model.
        :param img_geotrans: affine matrix
        :param img_col: image column (x)
        :param img_row: image row (y)
        :return: [geo_x, geo_y]
        """
        geo_x = img_geotrans[0] + img_geotrans[1] * img_col + img_geotrans[2] * img_row
        geo_y = img_geotrans[3] + img_geotrans[4] * img_col + img_geotrans[5] * img_row
        return [geo_x, geo_y]
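    # Worked example of the six-parameter affine model above (values illustrative only):
    # with gt = (116.0, 0.001, 0.0, 44.0, 0.0, -0.001), pixel (col=100, row=200) maps to
    #   geo_x = 116.0 + 0.001 * 100 + 0.0 * 200 = 116.1
    #   geo_y = 44.0  + 0.0   * 100 - 0.001 * 200 = 43.8
    # i.e. trans_rowcol2geo(gt, 100, 200) == [116.1, 43.8]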
    @staticmethod
    def write_era_into_img(filename, im_proj, im_geotrans, im_data):
        """
        Save an image (band axis last, as produced by get_all_band_array).
        :param filename: output path
        :param im_proj: projection
        :param im_geotrans: geotransform
        :param im_data: image array
        """
        gdal_dtypes = {
            'int8': gdal.GDT_Byte,
            'uint16': gdal.GDT_UInt16,  # fixed key: numpy dtype name is 'uint16', not 'unit16'
            'int16': gdal.GDT_Int16,
            'uint32': gdal.GDT_UInt32,  # fixed key: 'uint32', not 'unit32'
            'int32': gdal.GDT_Int32,
            'float32': gdal.GDT_Float32,
            'float64': gdal.GDT_Float64,
        }
        if not gdal_dtypes.get(im_data.dtype.name, None) is None:
            datatype = gdal_dtypes[im_data.dtype.name]
        else:
            datatype = gdal.GDT_Float32

        # check array dimensions
        if len(im_data.shape) == 3:
            im_height, im_width, im_bands = im_data.shape  # shape[0] is the number of rows
        else:
            im_bands, (im_height, im_width) = 1, im_data.shape

        # create the file
        if os.path.exists(os.path.split(filename)[0]) is False:
            os.makedirs(os.path.split(filename)[0])

        driver = gdal.GetDriverByName("GTiff")  # the data type is required to size the allocation
        dataset = driver.Create(filename, im_width, im_height, im_bands, datatype)
        dataset.SetGeoTransform(im_geotrans)  # write the affine transform parameters
        dataset.SetProjection(im_proj)        # write the projection

        if im_bands == 1:
            dataset.GetRasterBand(1).WriteArray(im_data)  # write the array
        else:
            for i in range(im_bands):
                dataset.GetRasterBand(i + 1).WriteArray(im_data[:, :, i])
        del dataset

    @staticmethod
    def lat_lon_to_pixel(raster_dataset_path, location):
        """From zacharybears.com/using-python-to-translate-latlon-locations-to-pixels-on-a-geotiff/."""
        gdal.AllRegister()
        raster_dataset = gdal.Open(raster_dataset_path)
        if raster_dataset is None:
            return None
        ds = raster_dataset
        gt = ds.GetGeoTransform()
        srs = osr.SpatialReference()
        srs.ImportFromWkt(ds.GetProjection())
        srs_lat_lon = srs.CloneGeogCS()
        ct = osr.CoordinateTransformation(srs_lat_lon, srs)
        new_location = [None, None]
        # Change the point locations into the GeoTransform space
        (new_location[1], new_location[0], holder) = ct.TransformPoint(location[1], location[0])
        # Translate the x and y coordinates into pixel values
        Xp = new_location[0]
        Yp = new_location[1]
        dGeoTrans = gt
        dTemp = dGeoTrans[1] * dGeoTrans[5] - dGeoTrans[2] * dGeoTrans[4]
        Xpixel = (dGeoTrans[5] * (Xp - dGeoTrans[0]) - dGeoTrans[2] * (Yp - dGeoTrans[3])) / dTemp
        Yline = (dGeoTrans[1] * (Yp - dGeoTrans[3]) - dGeoTrans[4] * (Xp - dGeoTrans[0])) / dTemp
        del raster_dataset
        return (Xpixel, Yline)

    @staticmethod
    def write_img(filename, im_proj, im_geotrans, im_data, no_data='0'):
        """
        Save an image (band axis first).
        :param filename: output path
        :param im_proj: projection
        :param im_geotrans: geotransform
        :param im_data: image array
        :param no_data: value to register as nodata ('null' disables it)
        """

        gdal_dtypes = {
            'int8': gdal.GDT_Byte,
            'uint16': gdal.GDT_UInt16,  # fixed key: 'uint16', not 'unit16'
            'int16': gdal.GDT_Int16,
            'uint32': gdal.GDT_UInt32,  # fixed key: 'uint32', not 'unit32'
            'int32': gdal.GDT_Int32,
            'float32': gdal.GDT_Float32,
            'float64': gdal.GDT_Float64,
        }
        if not gdal_dtypes.get(im_data.dtype.name, None) is None:
            datatype = gdal_dtypes[im_data.dtype.name]
        else:
            datatype = gdal.GDT_Float32
        flag = False
        # check array dimensions
        if len(im_data.shape) == 3:
            im_bands, im_height, im_width = im_data.shape
            flag = True
        else:
            im_bands, (im_height, im_width) = 1, im_data.shape

        # create the file
        if os.path.exists(os.path.split(filename)[0]) is False:
            os.makedirs(os.path.split(filename)[0])

        driver = gdal.GetDriverByName("GTiff")  # the data type is required to size the allocation
        dataset = driver.Create(filename, im_width, im_height, im_bands, datatype)

        dataset.SetGeoTransform(im_geotrans)  # write the affine transform parameters

        dataset.SetProjection(im_proj)  # write the projection

        if im_bands == 1:
            if flag:
                outband = dataset.GetRasterBand(1)
                outband.WriteArray(im_data[0])
                if no_data != 'null':
                    outband.SetNoDataValue(np.double(no_data))
                outband.FlushCache()
            else:
                outband = dataset.GetRasterBand(1)
                outband.WriteArray(im_data)
                if no_data != 'null':
                    outband.SetNoDataValue(np.double(no_data))
                outband.FlushCache()
        else:
            for i in range(im_bands):
                outband = dataset.GetRasterBand(1 + i)
                outband.WriteArray(im_data[i])
                if no_data != 'null':
                    outband.SetNoDataValue(np.double(no_data))
                outband.FlushCache()
        del dataset
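    # Minimal round trip with the readers/writers above (paths hypothetical):
    #
    #     proj, geotrans, arr = ImageHandler.read_img(r"D:\data\in.tif")
    #     arr[arr < 0] = 0  # edit the array in place
    #     ImageHandler.write_img(r"D:\data\out.tif", proj, geotrans, arr, no_data='0')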
    @staticmethod
    def write_img_envi(filename, im_proj, im_geotrans, im_data, no_data='null'):
        """
        Save an image in ENVI format.
        :param filename: output path
        :param im_proj: projection
        :param im_geotrans: geotransform
        :param im_data: image array
        :param no_data: value to register as nodata ('null' disables it)
        """

        gdal_dtypes = {
            'int8': gdal.GDT_Byte,
            'uint16': gdal.GDT_UInt16,  # fixed key: 'uint16', not 'unit16'
            'int16': gdal.GDT_Int16,
            'uint32': gdal.GDT_UInt32,  # fixed key: 'uint32', not 'unit32'
            'int32': gdal.GDT_Int32,
            'float32': gdal.GDT_Float32,
            'float64': gdal.GDT_Float64,
        }
        if not gdal_dtypes.get(im_data.dtype.name, None) is None:
            datatype = gdal_dtypes[im_data.dtype.name]
        else:
            datatype = gdal.GDT_Float32

        # check array dimensions
        if len(im_data.shape) == 3:
            im_bands, im_height, im_width = im_data.shape
        else:
            im_bands, (im_height, im_width) = 1, im_data.shape

        # create the file
        if os.path.exists(os.path.split(filename)[0]) is False:
            os.makedirs(os.path.split(filename)[0])

        driver = gdal.GetDriverByName("ENVI")  # the data type is required to size the allocation
        dataset = driver.Create(filename, im_width, im_height, im_bands, datatype)

        dataset.SetGeoTransform(im_geotrans)  # write the affine transform parameters

        dataset.SetProjection(im_proj)  # write the projection

        if im_bands == 1:
            outband = dataset.GetRasterBand(1)
            outband.WriteArray(im_data)
            if no_data != 'null':
                outband.SetNoDataValue(no_data)
            outband.FlushCache()
        else:
            for i in range(im_bands):
                outband = dataset.GetRasterBand(1 + i)
                outband.WriteArray(im_data[i])
                outband.FlushCache()
        del dataset

    @staticmethod
    def write_img_rpc(filename, im_proj, im_geotrans, im_data, rpc_dict):
        """
        Save an image with RPC metadata written into it.
        """
        # determine the raster data type
        if 'int8' in im_data.dtype.name:
            datatype = gdal.GDT_Byte
        elif 'int16' in im_data.dtype.name:
            datatype = gdal.GDT_Int16
        else:
            datatype = gdal.GDT_Float32

        # check array dimensions
        if len(im_data.shape) == 3:
            im_bands, im_height, im_width = im_data.shape
        else:
            im_bands, (im_height, im_width) = 1, im_data.shape

        # create the file
        driver = gdal.GetDriverByName("GTiff")
        dataset = driver.Create(filename, im_width, im_height, im_bands, datatype)

        dataset.SetGeoTransform(im_geotrans)  # write the affine transform parameters
        dataset.SetProjection(im_proj)        # write the projection

        # write the RPC parameters
        for k in rpc_dict.keys():
            dataset.SetMetadataItem(k, rpc_dict[k], 'RPC')

        if im_bands == 1:
            dataset.GetRasterBand(1).WriteArray(im_data)  # write the array
        else:
            for i in range(im_bands):
                dataset.GetRasterBand(i + 1).WriteArray(im_data[i])

        del dataset

    def transtif2mask(self, out_tif_path, in_tif_path, threshold):
        """
        Threshold a tif into a 0/1 mask (1 where value < threshold).
        :param out_tif_path: output path
        :param in_tif_path: input path
        :param threshold: threshold value
        """
        # read_img returns three values; the original four-value unpack would raise
        im_proj, im_geotrans, im_arr = self.read_img(in_tif_path)
        im_arr_mask = (im_arr < threshold).astype(int)
        self.write_img(out_tif_path, im_proj, im_geotrans, im_arr_mask)

    def write_quick_view(self, tif_path, color_img=False, quick_view_path=None):
        """
        Generate a quick-view image; by default it sits next to the tif with the same stem.
        :param tif_path: image path
        :param color_img: whether to generate a random pseudo-color image
        :param quick_view_path: quick-view output path
        """
        if quick_view_path is None:
            quick_view_path = os.path.splitext(tif_path)[0] + '.jpg'

        n = self.get_bands(tif_path)
        if n == 1:  # single band
            t_data = self.get_data(tif_path)
        else:  # multi-band: convert to intensity
            t_data = self.get_data(tif_path)
            t_data = t_data.astype(float)
            t_data = np.sqrt(t_data[0] ** 2 + t_data[1] ** 2)

        t_r = self.get_img_height(tif_path)
        t_c = self.get_img_width(tif_path)
        if t_r > 10000 or t_c > 10000:
            q_r = int(t_r / 10)
            q_c = int(t_c / 10)
        elif 1024 < t_r < 10000 or 1024 < t_c < 10000:
            if t_r > t_c:
                q_r = 1024
                q_c = int(t_c / t_r * 1024)
            else:
                q_c = 1024
                q_r = int(t_r / t_c * 1024)
        else:
            q_r = t_r
            q_c = t_c

        if color_img is True:
            # generate a pseudo-color image
            img = np.zeros((t_r, t_c, 3), dtype=np.uint8)  # (height, width, channels)
            u = np.unique(t_data)
            for i in u:
                if i != 0:
                    w = np.where(t_data == i)
                    img[w[0], w[1], 0] = np.random.randint(0, 255)  # random 0..255; tune to restrict the palette
                    img[w[0], w[1], 1] = np.random.randint(0, 255)
                    img[w[0], w[1], 2] = np.random.randint(0, 255)

            img = cv2.resize(img, (q_c, q_r))  # (width, height)
            cv2.imwrite(quick_view_path, img)
        else:
            # grayscale image stretched between the 2nd and 98th percentiles
            min = np.percentile(t_data, 2)   # np.nanmin(t_data)
            max = np.percentile(t_data, 98)  # np.nanmax(t_data)
            t_data[np.isnan(t_data)] = max
            if (max - min) < 256:
                t_data = (t_data - min) / (max - min) * 255
            out_img = Image.fromarray(t_data)
            out_img = out_img.resize((q_c, q_r))  # resample
            out_img = out_img.convert("L")  # convert to grayscale
            out_img.save(quick_view_path)

    def limit_field(self, out_path, in_path, min_value, max_value):
        """
        Clamp an image to [min_value, max_value].
        :param out_path: output path
        :param in_path: input path; the output inherits its georeferencing
        :param min_value: lower bound
        :param max_value: upper bound
        """
        proj = self.get_projection(in_path)
        geotrans = self.get_geotransform(in_path)
        array = self.get_band_array(in_path, 1)
        array[array < min_value] = min_value
        array[array > max_value] = max_value
        self.write_img(out_path, proj, geotrans, array)
        return True

    def band_merge(self, lon, lat, ori_sim):
        lon_arr = self.get_data(lon)
        lat_arr = self.get_data(lat)
        temp = np.zeros((2, lon_arr.shape[0], lon_arr.shape[1]), dtype=float)
        temp[0, :, :] = lon_arr[:, :]
        temp[1, :, :] = lat_arr[:, :]
        self.write_img(ori_sim, '', [0.0, 1.0, 0.0, 0.0, 0.0, 1.0], temp, '0')

    def get_scopes(self, ori_sim):
        ori_sim_data = self.get_data(ori_sim)
        lon = ori_sim_data[0, :, :]
        lat = ori_sim_data[1, :, :]

        min_lon = np.nanmin(np.where((lon != 0) & ~np.isnan(lon), lon, np.inf))
        max_lon = np.nanmax(np.where((lon != 0) & ~np.isnan(lon), lon, -np.inf))
        min_lat = np.nanmin(np.where((lat != 0) & ~np.isnan(lat), lat, np.inf))
        max_lat = np.nanmax(np.where((lat != 0) & ~np.isnan(lat), lat, -np.inf))

        scopes = [[min_lon, max_lat], [max_lon, max_lat], [min_lon, min_lat], [max_lon, min_lat]]
        return scopes

    @staticmethod
    def dem_merged(in_dem_path, out_dem_path):
        '''
        Mosaic and resample DEM tiles; the default CRS is WGS84.
        args:
            in_dem_path: folder containing the input DEM tiles
            out_dem_path: output folder
        '''
        # collect every DEM tile in the folder
        dem_file_paths = [os.path.join(in_dem_path, dem_name) for dem_name in os.listdir(in_dem_path) if
                          dem_name.find(".tif") >= 0 and dem_name.find(".tif.") == -1]
        spatialreference = osr.SpatialReference()
        spatialreference.SetWellKnownGeogCS("WGS84")  # geographic coordinates in degrees
        spatialproj = spatialreference.ExportToWkt()  # export the projection as WKT
        # mosaic the DEM tiles into one image
        mergeFile = gdal.BuildVRT(os.path.join(out_dem_path, "mergedDEM_VRT.tif"), dem_file_paths)
        out_DEM = os.path.join(out_dem_path, "mergedDEM.tif")
        gdal.Warp(out_DEM,
                  mergeFile,
                  format="GTiff",
                  dstSRS=spatialproj,
                  dstNodata=-9999,
                  outputType=gdal.GDT_Float32)
        time.sleep(3)
        # gdal.CloseDir(out_DEM)
        return out_DEM
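# A usage sketch for dem_merged (folder layout hypothetical): every *.tif under
# the input folder is mosaicked via a VRT and warped to WGS84 with nodata -9999.
#
#     merged_dem = ImageHandler.dem_merged(r"D:\data\dem_tiles", r"D:\data\dem_out")
#     print(merged_dem)  # -> D:\data\dem_out\mergedDEM.tif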
if __name__ == '__main__':
    fn = r'D:\micro\LWork\20230404\20230404_Range.npy'
    a = np.load(fn)
    LIGHTSPEED = 299792458
    nRange = 4.96786423292669768E-03 * (LIGHTSPEED / 2)
    r = 1.66551365555555564
    slrange = nRange + 6684 * r

    print(a)
    # flatEffect = ((-4) * np.pi / 0.511) * a
    # h, w = flatEffect.shape
    # Range = np.zeros((h, w), dtype=float)
    # one = np.ones((1, w), dtype=int)
    # Range[0:500, :] = one
    # print(flatEffect)
    # fn = r'D:\micro\WorkSpace\LandCover-old\Output\GF3_KSC_QPSI_036065_E116.4_N44.2_20230616_L1A_AHV_L10006792277-ortho-LANDClASS\GF3_KSC_QPSI_036065_E116.4_N44.2_20230616_L1A_AHV_L10006792277-ortho-LANDCLASS.tif'
    # out = r'D:\micro\WorkSpace\LandCover-old\Output\GF3_KSC_QPSI_036065_E116.4_N44.2_20230616_L1A_AHV_L10006792277-ortho-LANDClASS\test.tif'
    # im_proj, im_geotrans, im_arr = ImageHandler.read_img(fn)
    # im_arr[np.where(im_arr == 302)] = 32
    # im_arr[np.where(im_arr == 204)] = 24
    # im_arr[np.where(im_arr == 401)] = 41
    # im_arr[np.where(im_arr == 501)] = 51
    # ImageHandler.write_img(out, im_proj, im_geotrans, im_arr, '0')

    # path = r'D:\BaiduNetdiskDownload\GZ\lon.rdr'
    # path2 = r'D:\BaiduNetdiskDownload\GZ\lat.rdr'
    # path3 = r'D:\BaiduNetdiskDownload\GZ\lon_lat.tif'
    # s = ImageHandler().band_merge(path, path2, path3)
    # print(s)
    # pass
@ -1,185 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project  : SalinityMain.py
@File     : MonteCarloSampling.py
@Function : optimal feature selection based on Monte Carlo random sampling
@Contact  :
@Author   : SHJ
@Date     : 2021/10/19 11:30
@Version  : 1.0.0
"""
import numpy as np
from numpy import random
import matplotlib.pyplot as plt
import seaborn as sns
import logging

logger = logging.getLogger("mylog")

def api_sel_feature(x_list, iter=100, alpha=0.5, ts=-0.5, iter_ratio=0.2):
    """
    :para x_list: training samples of one feature over k classes [X1, X2, ..., Xi, ..., Xk],
                  where Xi = np.array([x1, x2, ..., xn]) holds the n samples of class i
    :para iter: number of iterations
    :para alpha: weighting factor
    :para ts: threshold on com_sep_coef
    :para iter_ratio: threshold on the fraction of iterations
    :return: True - the feature is strongly related to the classes; False - it is not
    """
    com_sep_coef_old = cal_com_sep_coef(x_list, alpha)
    if com_sep_coef_old < ts:
        return False, com_sep_coef_old

    X = np.zeros(1)   # x_list concatenated into one row vector
    x_len_list = []   # records where each class starts within X
    num_sampler = 0   # total number of samples
    t = 0
    flag = 0
    for x in x_list:
        len_x = len(x)
        if t == 0:
            X = x
            x_len_list.append(len_x)
        else:
            X = np.hstack([X, x])
            x_len_list.append(x_len_list[t - 1] + len_x)
        num_sampler += len_x
        t += 1
    x_len_list.pop()
    num = int(np.ceil(num_sampler / 3))

    for i in range(iter):
        # draw random sample indices
        randmtx = np.random.rand(1, num)
        randmtx_ceil = np.ceil(randmtx * num_sampler).astype(int)
        randmtx_ceil = np.sort(randmtx_ceil[0, :]) - 1

        # permute the drawn values and substitute them back to build a new array
        X_new_sel = X.copy()
        X_new_sel[randmtx_ceil] = np.random.permutation(X[randmtx_ceil])

        X_new_list = np.split(X_new_sel, x_len_list)
        com_sep_coef_new = cal_com_sep_coef(X_new_list, alpha)
        if com_sep_coef_new <= com_sep_coef_old:
            flag += 1
    logger.info('flag:' + str(flag) + ', iter:' + str(iter) + ', flag/iter:' + str(int(flag) / int(iter)))
    if flag > (iter * iter_ratio):
        return False, com_sep_coef_old
    return True, com_sep_coef_old
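# Usage sketch (toy data): a feature is kept when random permutation of the
# samples rarely improves the compactness-separation coefficient.
#
#     keep, coef = api_sel_feature([np.array([1, 1.1]), np.array([2, 2.1, 2.2])])
#     # keep is True/False, coef is the unpermuted com_sep_coef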
def cal_com_coef(x_list):
    """
    :para x_list: training samples of one feature over k classes [X1, X2, ..., Xi, ..., Xk],
                  where Xi = np.array([x1, x2, ..., xn]) holds the n samples of class i
    :return com_coef: compactness coefficient (mean within-class pairwise distance)
    """
    class_num = len(x_list)
    coef_array = np.full((1, class_num), 0.0)
    for m in range(class_num):
        sample_num = len(x_list[m])
        c = np.full((1, sample_num), 0.0)
        for u in range(sample_num):
            l = np.full((1, sample_num), x_list[m][u])
            c[0, u] = np.sum(np.abs(l - x_list[m]))
        coef_array[0, m] = np.sum(c) / (sample_num * (sample_num - 1))
    com_coef = np.sum(coef_array) / class_num
    return com_coef


def cal_sep_coef(x_list):
    """
    :para x_list: training samples of one feature over k classes [X1, X2, ..., Xi, ..., Xk],
                  where Xi = np.array([x1, x2, ..., xn]) holds the n samples of class i
    :return sep_coef: separation coefficient (mean between-class pairwise distance)
    """
    class_num = len(x_list)
    coef_list = []
    coef_sum = 0
    for m in range(class_num):
        xm = x_list[m]
        l_xm = len(xm)
        for n in range(class_num):
            if not n == m:
                xn = x_list[n]
                l_xn = len(xn)
                xm = np.expand_dims(xm, 1)
                coef_list.append(np.sum(np.abs(xm - xn)) / (l_xm * l_xn))
    for coef in coef_list:
        coef_sum = coef_sum + coef

    if class_num == 1 or class_num == 0:
        sep_coef = coef_sum
    else:
        sep_coef = coef_sum / (class_num * (class_num - 1))
    return sep_coef


def cal_com_sep_coef(x_list, alpha=0.5):
    """
    :para x_list: training samples of one feature over k classes [X1, X2, ..., Xi, ..., Xk],
                  where Xi = np.array([x1, x2, ..., xn]) holds the n samples of class i
    :para alpha: weighting factor
    :return com_sep_coef: compactness-separation coefficient
    """
    # the original `if not alpha >= 0 and alpha <= 1` mis-grouped due to operator
    # precedence and raised a bare string; both issues are fixed here
    if not (0 <= alpha <= 1):
        raise ValueError('input_para_alpha beyond [0, 1]!')
    com_coef = cal_com_coef(x_list)
    sep_coef = cal_sep_coef(x_list)
    com_sep_coef = alpha * com_coef - (1 - alpha) * sep_coef
    return com_sep_coef
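# Worked example (the toy classes from the commented-out __main__ below):
#
#     x1 = np.array([1, 1.1]); x2 = np.array([2, 2.1, 2.2]); x3 = np.array([3, 3.4, 3.1])
#     cal_com_coef([x1, x2, x3])      # mean within-class |xi - xj| distance
#     cal_sep_coef([x1, x2, x3])      # mean between-class |xi - xj| distance
#     cal_com_sep_coef([x1, x2, x3])  # alpha * com - (1 - alpha) * sep
#
# With alpha = 0.5 the coefficient is negative here, because the classes are far
# apart relative to their internal spread (compact and well separated).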
def get_logistic_rand_number(num, u=0.4):  # deprecated
    randmtx = np.full((1, num), 0.0)
    # randmtx[0, 0] = np.random.rand(1, 1)  # random initial value
    randmtx[0, 0] = 0.5  # fixed initial value

    for i in range(1, num):
        randmtx[0, i] = u * randmtx[0, i - 1] * (1 - randmtx[0, i - 1])
    randmtx = randmtx * 3 * num
    randmtx_ceil = np.ceil(randmtx)

    # plot the distribution of the random numbers
    # randmty = np.arange(0, num, 1)
    # randmty = np.expand_dims(randmty, 1)
    # fig, axes = plt.subplots(1, 1, figsize=(5, 5))
    # axes.scatter(randmty, randmtx_ceil, alpha=.3, label='ground truth')
    # axes.legend()
    # plt.tight_layout()
    # plt.show()
    return randmtx_ceil


def test():
    """Test random number generation."""
    # insertion
    # a = np.array([3.4, 2.5, 1.8, 4.7, 5.6, 2.1])
    # b = np.array([2.5, 4.7, 5.6])
    # c = a[[0, 1]]
    # a[[0, 1]] = np.array([1, 1])

    # random permutation (shuffle needs an array argument;
    # the original bare random.shuffle() call would raise)
    random.shuffle(np.arange(10))

    # logistic random numbers
    sns.distplot(random.normal(scale=2, size=1000), hist=False, label='normal')
    sns.distplot(random.logistic(loc=2, scale=0.5, size=1000), hist=False, label='logistic')
    plt.show()

    # plot the random numbers
    randmtx = random.logistic(loc=0.5, scale=0.5, size=100)
    randmtx.sort(axis=0)
    randmty = np.arange(0, 100, 1)
    randmty = np.expand_dims(randmty, 1)
    fig, axes = plt.subplots(1, 1, figsize=(5, 5))
    axes.scatter(randmty, randmtx, alpha=.3, label='ground truth')
    axes.legend()
    plt.tight_layout()
    plt.show()


# if __name__ == '__main__':
#     # example
#     x1 = np.array([1, 1.1])
#     x2 = np.array([2, 2.1, 2.2])
#     x3 = np.array([3, 3.4, 3.1])
#     x_list = [x1, x2, x3]
#     com_sep_coef = cal_com_sep_coef(x_list, 0.5)
#     flag = api_sel_feature(x_list)
#     print('done')
@ -1,422 +0,0 @@
import sklearn  # imported so packaging picks up sklearn
import sklearn.utils  # packaging workaround
import sklearn.utils._cython_blas  # packaging workaround
import sklearn.utils._weight_vector  # packaging workaround
import sklearn.neighbors  # packaging workaround
import sklearn.neighbors._typedefs  # packaging workaround
import sklearn.neighbors._partition_nodes  # packaging workaround
import sklearn.neighbors._quad_tree  # packaging workaround
import sklearn.tree._utils  # packaging workaround
from sklearn.cross_decomposition import PLSRegression
from sklearn.ensemble import ExtraTreesClassifier
from sklearn.ensemble import RandomForestClassifier
from sklearn.svm import SVC
import numpy as np
from scipy.stats import pearsonr
from tool.algorithm.image.ImageHandle import ImageHandler
from tool.algorithm.block.blockprocess import BlockProcess
import logging
import os
import glob
from PIL import Image
from tool.file.fileHandle import fileHandle
import multiprocessing

logger = logging.getLogger("mylog")
file = fileHandle()

class MachineLeaning:
    """
    Machine learning utilities.
    """
    def __init__(self):
        pass

    @staticmethod
    def gene_optimal_train_set(train_data_dic, feature_tif_dir, important_threshold=0.3, correlation_threshold=0.7):  # TODO: revisit the feature-importance thresholds
        ml = MachineLeaning()
        name_list = ml.get_name_list(feature_tif_dir)
        X_train, Y_train = ml.gene_train_set(train_data_dic, feature_tif_dir)
        optimal_feature = ml.sel_optimal_feature_set(X_train, Y_train, threshold=important_threshold)
        optimal_feature = ml.remove_correlation_feature(X_train, optimal_feature, threshold=correlation_threshold)
        X_train = X_train[:, optimal_feature]
        logger.info('train_feature:%s', np.array(name_list)[optimal_feature])
        return X_train, Y_train, optimal_feature

    @staticmethod
    def sel_optimal_feature(X_train, Y_train, name_list, important_threshold=0.3, correlation_threshold=0.7):
        ml = MachineLeaning()
        optimal_feature = ml.sel_optimal_feature_set(X_train, Y_train, threshold=important_threshold)
        optimal_feature = ml.remove_correlation_feature(X_train, optimal_feature, threshold=correlation_threshold)
        X_train = X_train[:, optimal_feature]
        logger.info('train_feature:%s', np.array(name_list)[optimal_feature])
        return X_train, Y_train, optimal_feature
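    # Sketch of the screening pipeline above (inputs hypothetical): importance
    # filtering with extra trees, then correlation suppression.
    #
    #     ml = MachineLeaning()
    #     X_sel, Y_sel, kept = ml.sel_optimal_feature(X, Y, name_list,
    #                                                 important_threshold=0.05,
    #                                                 correlation_threshold=0.85)
    #     # `kept` indexes the surviving feature columns; apply it to the test set too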
    @staticmethod
    def gene_test_set(feature_tif_dir, optimal_feature):
        """
        Build the test set.
        :param feature_tif_dir: folder of feature images
        :param optimal_feature: optimal feature subset
        :return X_test_list: paths of the blocked test-set images
        """
        in_tif_paths = list(glob.glob(os.path.join(feature_tif_dir, '*.tif')))
        cols = ImageHandler.get_img_width(in_tif_paths[0])
        rows = ImageHandler.get_img_height(in_tif_paths[0])
        workspace_block_tif_path = os.path.join(feature_tif_dir, 'block')
        workspace_block_feature_path = os.path.join(feature_tif_dir, 'feature')
        file.creat_dirs([workspace_block_tif_path, workspace_block_feature_path])

        # cut the features into blocks
        bp = BlockProcess()
        block_size = bp.get_block_size(rows, cols)

        bp.cut(feature_tif_dir, workspace_block_tif_path, ['tif', 'tiff'], 'tif', block_size)
        img_dir, img_name = bp.get_file_names(workspace_block_tif_path, ['tif'])
        dir_dict_all = bp.get_same_img(img_dir, img_name)

        # keep only the feature images in the optimal subset
        dir_dict = {}
        for n, key in zip(range(len(dir_dict_all)), dir_dict_all):
            if n in optimal_feature:
                dir_dict.update({key: dir_dict_all[key]})
        logger.info('test_feature:%s', dir_dict.keys())
        logger.info('blocking tifs success!')
        X_test_list = []
        # stack the feature dimensions
        for key in dir_dict:
            key_name = key
            block_num = len(dir_dict[key])
            break
        for n in range(block_num):
            name = os.path.basename(dir_dict[key_name][n])
            suffix = '_' + name.split('_')[-4] + "_" + name.split('_')[-3] + "_" + name.split('_')[-2] + "_" + name.split('_')[-1]
            features_path = os.path.join(workspace_block_feature_path, "features" + suffix)
            X_test_list.append(features_path)
            features_array = np.zeros((len(dir_dict), block_size, block_size), dtype='float32')
            for m, value in zip(range(len(dir_dict)), dir_dict.values()):
                features_array[m, :, :] = ImageHandler.get_band_array(value[n])
            features_array[np.isnan(features_array)] = 0.0  # replace invalid values with 0
            ImageHandler.write_img(features_path, '', [0, 0, 0, 0, 0, 0], features_array)
        logger.info('create features matrix success!')
        # file.del_folder(workspace_block_tif_path)
        # file.del_folder(workspace_block_feature_path)
        return X_test_list
    @staticmethod
    def predict_blok(clf, X_test, rows, cols, img_path, row_begin, col_begin, block_sum, n):
        logger.info('total:%s,block:%s testing data !path:%s', block_sum, n, img_path)

        Y_test = clf.predict(X_test)
        img = Y_test.reshape(rows, cols)
        out_image = Image.fromarray(img)
        out_image.save(img_path)
        # bp = BlockProcess()
        # bp.assign_spatial_reference_bypoint(row_begin, col_begin, self.__proj, self.__geo, img_path)
        logger.info('total:%s,block:%s test data finished !path:%s', block_sum, n, img_path)
        return True

    @staticmethod
    def predict(clf, X_test_list, out_tif_name, workspace_processing_path, rows, cols):
        """
        Predict on the blocked test set.
        :param clf: trained classifier (e.g. an SVM)
        :param X_test_list: paths of the blocked test-set images
        :return cover_path: path of the merged prediction image
        """
        ml = MachineLeaning()
        # process the blocks in parallel
        bp = BlockProcess()
        block_size = bp.get_block_size(rows, cols)

        block_features_dir = X_test_list
        bp_cover_dir = os.path.join(workspace_processing_path, out_tif_name + '\\')
        file.creat_dirs([bp_cover_dir])

        processes_num = min([len(block_features_dir), multiprocessing.cpu_count() - 1])
        pool = multiprocessing.Pool(processes=processes_num)

        for path, n in zip(block_features_dir, range(len(block_features_dir))):
            name = os.path.split(path)[1]
            band = ImageHandler.get_bands(path)
            if band == 1:
                features_array = np.zeros((1, 1024, 1024), dtype=float)
                feature_array = ImageHandler.get_data(path)
                features_array[0, :, :] = feature_array
            else:
                features_array = ImageHandler.get_data(path)

            X_test = np.reshape(features_array, (features_array.shape[0], features_array[0].size)).T

            suffix = '_' + name.split('_')[-4] + "_" + name.split('_')[-3] + "_" + name.split('_')[-2] + "_" + name.split('_')[-1]
            img_path = os.path.join(bp_cover_dir, out_tif_name + suffix)
            row_begin = int(name.split('_')[-4])
            col_begin = int(name.split('_')[-2])
            pool.apply_async(ml.predict_blok, (clf, X_test, block_size, block_size, img_path, row_begin, col_begin, len(block_features_dir), n))

        pool.close()
        pool.join()

        # merge the block predictions
        data_dir = bp_cover_dir
        out_path = workspace_processing_path[0:-1]
        bp.combine(data_dir, cols, rows, out_path, file_type=['tif'], datetype='float32')

        # assign geolocation info
        cover_path = os.path.join(workspace_processing_path, out_tif_name + ".tif")
        # bp.assign_spatial_reference_byfile(self.__ref_img_path, cover_path)
        return cover_path

    @staticmethod
    def predict_VP(clf, X_test_list, out_tif_name, workspace_processing_path, rows, cols):
        """
        Predict on the blocked test set (vegetation phenology variant).
        :param clf: trained classifier (e.g. an SVM)
        :param X_test_list: paths of the blocked test-set images
        :return cover_path: path of the merged prediction image
        """
        ml = MachineLeaning()
        # process the blocks in parallel
        bp = BlockProcess()
        block_size = bp.get_block_size(rows, cols)

        block_features_dir = X_test_list
        bp_cover_dir = os.path.join(workspace_processing_path, out_tif_name, 'pre_result\\')
        file.creat_dirs([bp_cover_dir])

        processes_num = min([len(block_features_dir), multiprocessing.cpu_count() - 1])
        pool = multiprocessing.Pool(processes=processes_num)

        for path, n in zip(block_features_dir, range(len(block_features_dir))):
            name = os.path.split(path)[1]
            features_array = ImageHandler.get_data(path)

            X_test = np.reshape(features_array, (features_array.shape[0], features_array[0].size)).T

            suffix = '_' + name.split('_')[-4] + "_" + name.split('_')[-3] + "_" + name.split('_')[-2] + "_" + \
                     name.split('_')[-1]
            img_path = os.path.join(bp_cover_dir, out_tif_name + suffix)
            row_begin = int(name.split('_')[-4])
            col_begin = int(name.split('_')[-2])
            pool.apply_async(ml.predict_blok, (
                clf, X_test, block_size, block_size, img_path, row_begin, col_begin, len(block_features_dir), n))

        pool.close()
        pool.join()

        # merge the block predictions
        data_dir = bp_cover_dir
        out_path = workspace_processing_path[0:-1]
        bp.combine(data_dir, cols, rows, out_path, file_type=['tif'], datetype='float32')

        # assign geolocation info
        cover_path = os.path.join(workspace_processing_path, out_tif_name + ".tif")
        # bp.assign_spatial_reference_byfile(self.__ref_img_path, cover_path)
        return cover_path
    @staticmethod
    def get_name_list(feature_tif_dir):
        in_tif_paths = list(glob.glob(os.path.join(feature_tif_dir, '*.tif')))
        name_list = []
        dim = len(in_tif_paths)
        for n, path in zip(range(dim), in_tif_paths):
            name_list.append(str(n) + ': ' + os.path.split(path)[1])
        logger.info('feature_list:%s', name_list)
        return name_list

    @staticmethod
    def gene_train_set(train_data_dic, feature_tif_dir):
        """
        Build the training set.
        :param train_data_dic: training data read from csv
        :param feature_tif_dir: folder of feature images
        :return X_train, Y_train: training data
        """
        in_tif_paths = list(glob.glob(os.path.join(feature_tif_dir, '*.tif')))
        dim = len(in_tif_paths)
        X_train = np.empty(shape=(0, dim))
        Y_train = np.empty(shape=(0, 1))

        ids = train_data_dic['ids']
        positions = train_data_dic['positions']
        for id, points in zip(ids, positions):
            if points == []:
                raise Exception('data is empty!')
            row, col = zip(*points)
            l = len(points)
            X = np.empty(shape=(l, dim))

            for n, tif_path in zip(range(dim), in_tif_paths):
                feature_array = ImageHandler.get_data(tif_path)
                feature_array[np.isnan(feature_array)] = 0  # replace invalid values with 0
                x = feature_array[row, col].T
                X[:, n] = x

            Y = np.full((l, 1), id)
            X_train = np.vstack((X_train, X))
            Y_train = np.vstack((Y_train, Y))
        Y_train = Y_train.T[0, :]

        logger.info("gene_train_set success!")
        return X_train, Y_train

    @staticmethod
    def standardization(data, num=1):
        # scale the array to [0, num]
        min = np.nanmin(data)
        max = np.nanmax(data)
        data[np.isnan(data)] = min  # replace NaNs with the minimum
        _range = max - min
        return (data - min) / _range * num
    @staticmethod
    def sel_optimal_feature_set(X_train, Y_train, threshold=0.01):
        """
        Select the optimal feature subset with extremely randomized trees.
        """
        model = ExtraTreesClassifier()
        max = np.max(Y_train)
        if max < 0.1:
            Y_train = (Y_train * 10000).astype('int')
        model.fit(X_train, Y_train.astype('int'))
        # relative importance of each feature
        importances = model.feature_importances_
        logger.info('importances:%s,threshold=%s', importances, threshold)

        importances_resort = -np.sort(-importances)  # importances in descending order
        imp_argsort = np.argsort(-importances)       # feature indices in descending order

        optimal_feature = list(imp_argsort[np.where(importances_resort > threshold)])  # drop low-importance features
        logger.info('optimal_feature:%s', optimal_feature)

        if len(optimal_feature) == 0:
            logger.error('optimal_feature is empty')
            optimal_feature = list(imp_argsort)
        return optimal_feature

    @staticmethod
    def correlation_map(x, y):
        # https://blog.csdn.net/weixin_39836726/article/details/110783640
        # cc matrix based on scipy pearsonr
        n_row_x = x.shape[0]
        n_row_y = y.shape[0]  # fixed: the original read x.shape[0] here
        ccmtx_xy = np.empty((n_row_x, n_row_y))
        for n in range(n_row_x):
            for m in range(n_row_y):
                ccmtx_xy[n, m] = pearsonr(x[n, :], y[m, :])[0]
        return ccmtx_xy

    @staticmethod
    def remove_correlation_feature(X_train, validity_list, threshold=0.85):
        """
        Correlation suppression: drop highly correlated features.
        :param X_train: training set
        :param validity_list: optimal feature subset
        :param threshold: correlation threshold
        :return validity_list: optimal feature subset after suppression
        """
        ccmtx = MachineLeaning().correlation_map(X_train[:, validity_list].T, X_train[:, validity_list].T)
        ccmtx = np.abs(ccmtx)
        for r in range(len(validity_list)):
            for c in range(len(validity_list)):
                if c <= r:
                    ccmtx[r, c] = 0
        logger.info('correlation_map:\n %s', ccmtx)
        # for feature pairs whose correlation exceeds the threshold, drop the later feature
        high_corr = np.unique(np.where(ccmtx > threshold)[1])  # indices of the features to drop
        validity_list = np.delete(validity_list, high_corr)
        logger.info('validity_list_corr:%s', validity_list)
        logger.info(validity_list)
        return validity_list
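    # Tiny illustration of the suppression above: with three features where
    # column 2 nearly duplicates column 0, the upper triangle of |pearson r|
    # exceeds the threshold at (0, 2), so feature index 2 is dropped:
    #
    #     X = np.array([[1., 10., 1.1], [2., 8., 2.0], [3., 11., 3.2], [4., 9., 4.1]])
    #     MachineLeaning().remove_correlation_feature(X, np.array([0, 1, 2]), threshold=0.85)
    #     # -> array([0, 1])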
    @staticmethod
    def gene_train_data(block_features_dir, rows, cols, block_size, measured_data_img):
        # build the training set from measured points
        X_train = []
        Y_train = []

        block_rows = int(np.ceil(rows / block_size))
        block_cols = int(np.ceil(cols / block_size))

        for data, n in zip(measured_data_img, range(len(measured_data_img))):
            row = data[0]
            col = data[1]
            block_row = row // block_size
            block_col = col // block_size

            if block_row == block_rows - 1:
                part_img_row = row - (rows - block_size)
            else:
                part_img_row = row % block_size

            if block_col == block_cols - 1:
                part_img_col = col - (cols - block_size)
            else:
                part_img_col = col % block_size

            features_path = block_features_dir[block_row * block_rows + block_col]
            features_array = ImageHandler().get_data(features_path)

            feature = features_array[:, part_img_row, part_img_col]
            # the original test `not A or B` mis-grouped; invalid samples must be skipped
            if not (np.isnan(feature).any() or np.isinf(feature).any()):
                X_train.append(list(feature))
                Y_train.append([data[2]])
            logger.info('total:%s,num:%s create train set success!', len(measured_data_img), n)
        return np.array(X_train), np.array(Y_train)

    @staticmethod
    def trainRF(X_train, Y_train):
        # random forest
        logger.info('RF trainning')
        clf = RandomForestClassifier()
        clf.fit(X_train, Y_train)
        return clf

    @staticmethod
    def trainSVM(X_train, Y_train, cost=1, kernel='rbf'):
        logger.info('svm trainning')
        # configure the SVC before fitting; the original constructed a second,
        # unused SVC after fit, so `cost` and `kernel` were silently ignored
        clf = SVC(C=cost, cache_size=1000, class_weight='balanced', coef0=0.0,
                  decision_function_shape='ovo', degree=3, gamma='auto',
                  kernel=kernel, max_iter=-1, probability=False,
                  random_state=None, shrinking=True, tol=0.001, verbose=True)
        clf.fit(X_train, Y_train)
        return clf
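    # Minimal train/predict sketch for the helpers above (arrays hypothetical):
    #
    #     X = np.random.rand(100, 5)
    #     y = np.random.randint(0, 3, 100)
    #     clf = MachineLeaning.trainSVM(X, y, cost=10, kernel='rbf')
    #     pred = clf.predict(X)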
    @staticmethod
    def vegetationPhenology_combine_feature(feature_dir, workspace_processing_path, name, rows, cols, debug=False):
        ml = MachineLeaning()
        path_list = list(glob.glob(os.path.join(feature_dir, '*.tif')))
        # stack the feature images into a single multi-band image
        name_featuresPath_dic = {}
        dim = len(path_list)
        features_path = workspace_processing_path + name + "/" + name + '_features.tif'
        if debug == False:
            features_array = np.zeros((dim, rows, cols), dtype='float16')
            for m, path in zip(range(dim), path_list):
                data = ImageHandler.get_data(path)
                data = ml.standardization(data)
                features_array[m, :, :] = data
            # replace invalid values with 0
            features_array[np.isnan(features_array)] = 0.0
            features_array[np.isinf(features_array)] = 0.0
            ImageHandler.write_img(features_path, '', [0, 0, 0, 0, 0, 0], features_array)
        name_featuresPath_dic.update({name: features_path})
        return name_featuresPath_dic
@ -1,491 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project  : __init__.py
@File     : AHVToPolsarpro.py
@Function : convert fully polarimetric images to PolSARpro-format T3 data
@Contact  :
@Author   : SHJ
@Date     : 2021/9/18 16:44
@Version  : 1.0.0
"""
import os
import numpy as np
import glob
import struct
from tool.algorithm.image.ImageHandle import ImageHandler


class AHVToPolsarpro:
    """
    Convert fully polarimetric images to a bin-format T3 matrix for PolSARpro processing.
    """

    def __init__(self, hh_hv_vh_vv_path_list=[]):
        self._hh_hv_vh_vv_path_list = hh_hv_vh_vv_path_list
        pass
    @staticmethod
    def __ahv_to_s2_veg(ahv_dir):
        """
        Convert fully polarimetric images to the S2 scattering matrix
        (polarisation identified by the '_HH'/'_HV'/'_VH'/'_VV' name suffix).
        :param ahv_dir: folder of fully polarimetric images
        :return: S2 scattering matrix components
        """
        global s11
        in_tif_paths = list(glob.glob(os.path.join(ahv_dir, '*.tif')))
        in_tif_paths1 = list(glob.glob(os.path.join(ahv_dir, '*.tiff')))
        in_tif_paths += in_tif_paths1
        s11, s12, s21, s22 = None, None, None, None
        flag_list = [0, 0, 0, 0]
        for in_tif_path in in_tif_paths:

            # read the original SAR image
            proj, geotrans, data = ImageHandler.read_img(in_tif_path)

            # determine the polarisation
            if '_HH' in os.path.basename(in_tif_path):
                data_real = data[0, :, :]
                data_imag = data[1, :, :]
                s11 = data_real + 1j * data_imag
                flag_list[0] = 1
            elif '_HV' in os.path.basename(in_tif_path):
                data_real = data[0, :, :]
                data_imag = data[1, :, :]
                s12 = data_real + 1j * data_imag
                flag_list[1] = 1
            elif '_VH' in os.path.basename(in_tif_path):
                data_real = data[0, :, :]
                data_imag = data[1, :, :]
                s21 = data_real + 1j * data_imag
                flag_list[2] = 1
            elif '_VV' in os.path.basename(in_tif_path):
                data_real = data[0, :, :]
                data_imag = data[1, :, :]
                s22 = data_real + 1j * data_imag
                flag_list[3] = 1
            else:
                continue
        if not flag_list == [1, 1, 1, 1]:
            raise Exception('HH or HV or VH or VV is not in path :%s', ahv_dir)
        return s11, s12, s21, s22

    @staticmethod
    def __ahv_to_s2_soil(ahv_dir):
        """
        Convert fully polarimetric images to the S2 scattering matrix
        (polarisation identified by the 'HH'/'HV'/'VH'/'VV' substring in the name).
        :param ahv_dir: folder of fully polarimetric images
        :return: S2 scattering matrix components
        """
        global s11
        in_tif_paths = list(glob.glob(os.path.join(ahv_dir, '*.tif')))
        in_tif_paths1 = list(glob.glob(os.path.join(ahv_dir, '*.tiff')))
        in_tif_paths += in_tif_paths1
        s11, s12, s21, s22 = None, None, None, None
        flag_list = [0, 0, 0, 0]
        for in_tif_path in in_tif_paths:

            # read the original SAR image
            proj, geotrans, data = ImageHandler.read_img(in_tif_path)

            # determine the polarisation
            if 'HH' in os.path.basename(in_tif_path):
                data_real = data[0, :, :]
                data_imag = data[1, :, :]
                s11 = data_real + 1j * data_imag
                flag_list[0] = 1
            elif 'HV' in os.path.basename(in_tif_path):
                data_real = data[0, :, :]
                data_imag = data[1, :, :]
                s12 = data_real + 1j * data_imag
                flag_list[1] = 1
            elif 'VH' in os.path.basename(in_tif_path):
                data_real = data[0, :, :]
                data_imag = data[1, :, :]
                s21 = data_real + 1j * data_imag
                flag_list[2] = 1
            elif 'VV' in os.path.basename(in_tif_path):
                data_real = data[0, :, :]
                data_imag = data[1, :, :]
                s22 = data_real + 1j * data_imag
                flag_list[3] = 1
            else:
                continue
        if not flag_list == [1, 1, 1, 1]:
            raise Exception('HH or HV or VH or VV is not in path :%s', ahv_dir)
        return s11, s12, s21, s22
@staticmethod
|
||||
def __ahv_to_s2_list(ahv_path_list):
|
||||
"""
|
||||
全极化影像转S2矩阵
|
||||
:param ahv_dir: 全极化影像文件夹路径
|
||||
:return: 极化散射矩阵S2
|
||||
"""
|
||||
global s11
|
||||
in_tif_paths = ahv_path_list
|
||||
s11, s12, s21, s22 = None, None, None, None
|
||||
flag_list = [0, 0, 0, 0]
|
||||
for in_tif_path in in_tif_paths:
|
||||
|
||||
# 读取原始SAR影像
|
||||
proj, geotrans, data = ImageHandler.read_img(in_tif_path)
|
||||
|
||||
# 获取极化类型
|
||||
if 'HH' in os.path.basename(in_tif_path):
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s11 = data_real + 1j * data_imag
|
||||
flag_list[0] = 1
|
||||
elif 'HV' in os.path.basename(in_tif_path):
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s12 = data_real + 1j * data_imag
|
||||
flag_list[1] = 1
|
||||
elif 'VH' in os.path.basename(in_tif_path):
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s21 = data_real + 1j * data_imag
|
||||
flag_list[2] = 1
|
||||
elif 'VV' in os.path.basename(in_tif_path):
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s22 = data_real + 1j * data_imag
|
||||
flag_list[3] = 1
|
||||
else:
|
||||
continue
|
||||
if not flag_list == [1, 1, 1, 1]:
|
||||
raise Exception('HH or HV or VH or VV is not in path')
|
||||
return s11, s12, s21, s22
|
||||
|
||||
|
||||
@staticmethod
|
||||
def __ahv_to_s2_list_2(hh_hv_vh_vv_path_list):
|
||||
"""
|
||||
Convert full-polarimetric images into the S2 scattering matrix.
|
||||
:param hh_hv_vh_vv_path_list: image paths ordered as [HH, HV, VH, VV]
|
||||
:return: polarimetric scattering matrix S2 components (s11, s12, s21, s22)
|
||||
"""
|
||||
|
||||
in_tif_paths = hh_hv_vh_vv_path_list
|
||||
s11, s12, s21, s22 = None, None, None, None
|
||||
flag_list = [0, 0, 0, 0]
|
||||
for n, in_tif_path in enumerate(in_tif_paths):
|
||||
|
||||
# Read the original SAR image
|
||||
proj, geotrans, data = ImageHandler.read_img(in_tif_path)
|
||||
|
||||
# Assign by list position: 0=HH, 1=HV, 2=VH, 3=VV
|
||||
if n == 0:
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s11 = data_real + 1j * data_imag
|
||||
flag_list[0] = 1
|
||||
elif n == 1:
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s12 = data_real + 1j * data_imag
|
||||
flag_list[1] = 1
|
||||
elif n == 2:
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s21 = data_real + 1j * data_imag
|
||||
flag_list[2] = 1
|
||||
elif n == 3:
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s22 = data_real + 1j * data_imag
|
||||
flag_list[3] = 1
|
||||
else:
|
||||
continue
|
||||
if not flag_list == [1, 1, 1, 1]:
|
||||
raise Exception('HH or HV or VH or VV is not in path')
|
||||
return s11, s12, s21, s22
|
||||
|
||||
@staticmethod
|
||||
def __s2_to_t3(s11, s12, s21, s22):
|
||||
"""
|
||||
Convert the S2 scattering matrix into the T3 coherency matrix.
|
||||
:param s11: HH polarization data
|
||||
:param s12: HV polarization data
|
||||
:param s21: VH polarization data
|
||||
:param s22: VV polarization data
|
||||
:return: elements of the polarimetric coherency matrix T3
|
||||
"""
|
||||
HH = s11
|
||||
HV = s12
|
||||
VH = s21
|
||||
VV = s22
|
||||
|
||||
t11 = (np.abs(HH + VV)) ** 2 / 2
|
||||
t12 = (HH + VV) * np.conj(HH - VV) / 2
|
||||
t13 = (HH + VV) * np.conj(HV + VH)
|
||||
|
||||
t21 = (HH - VV) * np.conj(HH + VV) / 2
|
||||
t22 = np.abs(HH - VV) ** 2 / 2
|
||||
t23 = (HH - VV) * np.conj(HV + VH)
|
||||
|
||||
t31 = (HV + VH) * np.conj(HH + VV)
|
||||
t32 = (HV + VH) * np.conj(HH - VV)
|
||||
t33 = 2 * np.abs(HV + VH) ** 2
|
||||
return t11, t12, t13, t21, t22, t23, t31, t32, t33
|
||||
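For orientation: the nine elements above are, up to the scale factors chosen here, the entries of the outer product of the Pauli scattering vector k = [HH+VV, HH-VV, HV+VH]^T / sqrt(2). A minimal NumPy sketch of the textbook construction (hypothetical helper, not part of this module):

import numpy as np

def s2_to_t3_pauli(s11, s12, s21, s22):
    # Pauli scattering vector, shape (3, rows, cols)
    k = np.stack([s11 + s22, s11 - s22, s12 + s21]) / np.sqrt(2)
    # T3 = k k^H evaluated per pixel; result has shape (3, 3, rows, cols)
    return np.einsum('ixy,jxy->ijxy', k, np.conj(k))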
|
||||
def __t3_to_polsarpro_t3(self, out_dir, t11, t12, t13, t22, t23, t33):
|
||||
"""
|
||||
Write the T3 matrix as bin files compatible with PolSARpro.
|
||||
:param out_dir: output directory
|
||||
:param t11:
|
||||
:param t12:
|
||||
:param t13:
|
||||
:param t22:
|
||||
:param t23:
|
||||
:param t33:
|
||||
:return: T3 bin files plus ENVI header files
|
||||
"""
|
||||
if not os.path.exists(out_dir):
|
||||
os.makedirs(out_dir)
|
||||
|
||||
rows = t11.shape[0]
|
||||
cols = t11.shape[1]
|
||||
bins_dict = {
|
||||
'T11.bin': t11,
|
||||
'T12_real.bin': t12.real,
|
||||
'T12_imag.bin': t12.imag,
|
||||
'T13_real.bin': t13.real,
|
||||
'T13_imag.bin': t13.imag,
|
||||
'T22.bin': t22,
|
||||
'T23_real.bin': t23.real,
|
||||
'T23_imag.bin': t23.imag,
|
||||
'T33.bin': t33}
|
||||
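# Only the upper triangle is written: T3 is Hermitian, so T21, T31 and T32 are the
# conjugates of T12, T13 and T23 and need not be stored for PolSARpro.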
|
||||
for name, data in bins_dict.items():
|
||||
bin_path = os.path.join(out_dir, name)
|
||||
self.__write_img_bin(data, bin_path)  # TODO: revise how the T3 matrix is saved
|
||||
# data.tofile(bin_path)
|
||||
out_hdr_path = bin_path + '.hdr'
|
||||
self.__write_bin_hdr(out_hdr_path, bin_path, rows, cols)
|
||||
|
||||
self.__write_config_file(out_dir, rows, cols)
|
||||
|
||||
def rows(self):
|
||||
"""获取影像行数"""
|
||||
return self._rows
|
||||
|
||||
def cols(self):
|
||||
"""获取影像列数"""
|
||||
return self._cols
|
||||
|
||||
def __write_img_bin(self, im, file_path):
|
||||
"""
|
||||
Write an image to a bin file as float32.
|
||||
:param im: image matrix (only single-band data is supported for now)
|
||||
:param file_path: full path of the bin file
|
||||
"""
|
||||
with open(file_path, 'wb') as f:
|
||||
self._rows = im.shape[0]
|
||||
self._cols = im.shape[1]
|
||||
for row in range(self._rows):
|
||||
im_bin = struct.pack("f" * self._cols, *np.reshape(im[row, :], (self._cols, 1), order='F'))
|
||||
f.write(im_bin)
|
||||
|
||||
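Note: since each row is packed as plain float32, the loop above is equivalent to a single im.astype(np.float32).tofile(file_path) for a C-contiguous array, which is considerably faster; the commented data.tofile(bin_path) in __t3_to_polsarpro_t3 hints at the same idea.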
|
||||
@staticmethod
|
||||
def __write_bin_hdr(out_hdr_path, bin_path, rows, cols):
|
||||
"""
|
||||
Write the ENVI header file for an image.
|
||||
:param out_hdr_path: path of the header file
|
||||
:param bin_path: path of the bin file
|
||||
:param rows: number of image rows
|
||||
:param cols: number of image columns
|
||||
"""
|
||||
h1 = 'ENVI'
|
||||
h2 = 'description = {'
|
||||
h3 = 'File Imported into ENVI. }'
|
||||
h4 = 'samples = ' + str(cols)  # columns
|
||||
h5 = 'lines = ' + str(rows)  # rows
|
||||
h6 = 'bands = 1 '  # number of bands
|
||||
h7 = 'header offset = 0'
|
||||
h8 = 'file type = ENVI Standard'
|
||||
h9 = 'data type = 4'  # data type (4 = float32)
|
||||
h10 = 'interleave = bsq'  # interleave / storage order
|
||||
h11 = 'sensor type = Unknown'
|
||||
h12 = 'byte order = 0'
|
||||
h13 = 'band names = {'
|
||||
h14 = bin_path + '}'
|
||||
|
||||
h = [h1, h4, h5, h6, h7, h8, h9, h10, h12]
|
||||
doc = open(out_hdr_path, 'w')
|
||||
for i in range(0, 9):
|
||||
print(h[i], end='', file=doc)
|
||||
print('\n', end='', file=doc)
|
||||
doc.close()
|
||||
|
||||
@staticmethod
|
||||
def __write_config_file(out_config_dir, rows, cols):
|
||||
"""
|
||||
Write the PolSARpro config file.
|
||||
:param out_config_dir: directory to write config.txt into
|
||||
:param rows: number of image rows
|
||||
:param cols: number of image columns
|
||||
"""
|
||||
h1 = 'Nrow'
|
||||
h2 = str(rows)
|
||||
h3 = '---------'
|
||||
h4 = 'Ncol'
|
||||
h5 = str(cols)
|
||||
h6 = '---------'
|
||||
h7 = 'PolarCase'
|
||||
h8 = 'monostatic'
|
||||
h9 = '---------'
|
||||
h10 = 'PolarType'
|
||||
h11 = 'full'
|
||||
h = [h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11]
|
||||
|
||||
out_config_path = os.path.join(out_config_dir, 'config.txt')
|
||||
doc = open(out_config_path, 'w')
|
||||
for i in range(0, 11):
|
||||
print(h[i], end='', file=doc)
|
||||
print('\n', end='', file=doc)
|
||||
doc.close()
|
||||
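For a 1000 x 2000 scene, the generated config.txt reads:

Nrow
1000
---------
Ncol
2000
---------
PolarCase
monostatic
---------
PolarType
full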
|
||||
def incidence_tif2bin(self, incidence_file, out_path):
|
||||
if not os.path.exists(out_path):
|
||||
os.mkdir(out_path)
|
||||
incidence_bin = os.path.join(out_path, 'incidence.bin')
|
||||
data = ImageHandler().get_data(incidence_file)
|
||||
rows = data.shape[0]
|
||||
cols = data.shape[1]
|
||||
self.__write_img_bin(data, incidence_bin)
|
||||
if not os.path.exists(incidence_bin):
|
||||
raise Exception('incidence to bin failed')
|
||||
out_hdr_path = incidence_bin + '.hdr'
|
||||
self.__write_bin_hdr(out_hdr_path, incidence_bin, rows, cols)
|
||||
return incidence_bin
|
||||
|
||||
def ahv_to_polsarpro_t3_veg(self, out_file_dir, in_ahv_dir=''):
|
||||
|
||||
if self._hh_hv_vh_vv_path_list == []:
|
||||
s11, s12, s21, s22 = self.__ahv_to_s2_veg(in_ahv_dir)
|
||||
else:
|
||||
s11, s12, s21, s22 = self.__ahv_to_s2_list_2(self._hh_hv_vh_vv_path_list)
|
||||
|
||||
t11, t12, t13, t21, t22, t23, t31, t32, t33 = self.__s2_to_t3(
|
||||
s11, s12, s21, s22)
|
||||
|
||||
self.__t3_to_polsarpro_t3(out_file_dir, t11, t12, t13, t22, t23, t33)
|
||||
|
||||
|
||||
def ahv_to_polsarpro_t3_soil(self, out_file_dir, in_ahv_dir=''):
|
||||
|
||||
if self._hh_hv_vh_vv_path_list == []:
|
||||
s11, s12, s21, s22 = self.__ahv_to_s2_soil(in_ahv_dir)
|
||||
else:
|
||||
s11, s12, s21, s22 = self.__ahv_to_s2_list_2(self._hh_hv_vh_vv_path_list)
|
||||
|
||||
t11, t12, t13, t21, t22, t23, t31, t32, t33 = self.__s2_to_t3(
|
||||
s11, s12, s21, s22)
|
||||
|
||||
self.__t3_to_polsarpro_t3(out_file_dir, t11, t12, t13, t22, t23, t33)
|
||||
|
||||
def calibration(self, calibration_value, in_ahv_dir='', name=''):
|
||||
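# calibration_value holds the per-polarization linear scale factors, ordered [HH, HV, VH, VV];
# each factor is applied to both the real (band 0) and imaginary (band 1) parts.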
if name == '':
|
||||
out_dir = os.path.join(in_ahv_dir, 'calibration')
|
||||
else:
|
||||
out_dir = os.path.join(in_ahv_dir, name, 'calibration')
|
||||
flag_list = [0, 0, 0, 0]
|
||||
if self._hh_hv_vh_vv_path_list == []:  # land cover / soil salinity workflows
|
||||
in_tif_paths = list(glob.glob(os.path.join(in_ahv_dir, '*.tif')))
|
||||
in_tif_paths1 = list(glob.glob(os.path.join(in_ahv_dir, '*.tiff')))
|
||||
in_tif_paths += in_tif_paths1
|
||||
for in_tif_path in in_tif_paths:
|
||||
# Read the original SAR image
|
||||
proj, geotrans, data = ImageHandler.read_img(in_tif_path)
|
||||
name = os.path.basename(in_tif_path)
|
||||
data_new = np.zeros(data.shape)
|
||||
# Determine the polarization type from the file name
|
||||
if 'HH' in os.path.basename(in_tif_path):
|
||||
data_new[0, :, :] = data[0, :, :] * calibration_value[0]
|
||||
data_new[1, :, :] = data[1, :, :] * calibration_value[0]
|
||||
ImageHandler.write_img(os.path.join(out_dir, name), proj, geotrans, data_new)
|
||||
flag_list[0] = 1
|
||||
elif 'HV' in os.path.basename(in_tif_path):
|
||||
data_new[0, :, :] = data[0, :, :] * calibration_value[1]
|
||||
data_new[1, :, :] = data[1, :, :] * calibration_value[1]
|
||||
ImageHandler.write_img(os.path.join(out_dir, name), proj, geotrans, data_new)
|
||||
flag_list[1] = 1
|
||||
elif 'VH' in os.path.basename(in_tif_path):
|
||||
data_new[0, :, :] = data[0, :, :] * calibration_value[2]
|
||||
data_new[1, :, :] = data[1, :, :] * calibration_value[2]
|
||||
ImageHandler.write_img(os.path.join(out_dir, name), proj, geotrans, data_new)
|
||||
flag_list[2] = 1
|
||||
elif 'VV' in os.path.basename(in_tif_path):
|
||||
data_new[0, :, :] = data[0, :, :] * calibration_value[3]
|
||||
data_new[1, :, :] = data[1, :, :] * calibration_value[3]
|
||||
ImageHandler.write_img(os.path.join(out_dir, name), proj, geotrans, data_new)
|
||||
flag_list[3] = 1
|
||||
if not flag_list == [1, 1, 1, 1]:
|
||||
raise Exception('calibration error! ')
|
||||
else:
|
||||
for in_tif_path in self._hh_hv_vh_vv_path_list:  # vegetation phenology workflow
|
||||
# Read the original SAR image
|
||||
proj, geotrans, data = ImageHandler.read_img(in_tif_path)
|
||||
name = os.path.basename(in_tif_path)
|
||||
data_new = np.zeros(data.shape)
|
||||
|
||||
# Determine the polarization type from the file name
|
||||
if '_HH' in os.path.basename(in_tif_path):
|
||||
data_new[0, :, :] = data[0, :, :] * calibration_value[0]
|
||||
data_new[1, :, :] = data[1, :, :] * calibration_value[0]
|
||||
ImageHandler.write_img(os.path.join(out_dir, name), proj, geotrans, data_new)
|
||||
flag_list[0] = 1
|
||||
elif '_HV' in os.path.basename(in_tif_path):
|
||||
data_new[0, :, :] = data[0, :, :] * calibration_value[1]
|
||||
data_new[1, :, :] = data[1, :, :] * calibration_value[1]
|
||||
ImageHandler.write_img(os.path.join(out_dir, name), proj, geotrans, data_new)
|
||||
flag_list[1] = 1
|
||||
elif '_VH' in os.path.basename(in_tif_path):
|
||||
data_new[0, :, :] = data[0, :, :] * calibration_value[2]
|
||||
data_new[1, :, :] = data[1, :, :] * calibration_value[2]
|
||||
ImageHandler.write_img(os.path.join(out_dir, name), proj, geotrans, data_new)
|
||||
flag_list[2] = 1
|
||||
elif '_VV' in os.path.basename(in_tif_path):
|
||||
data_new[0, :, :] = data[0, :, :] * calibration_value[3]
|
||||
data_new[1, :, :] = data[1, :, :] * calibration_value[3]
|
||||
ImageHandler.write_img(os.path.join(out_dir, name), proj, geotrans, data_new)
|
||||
flag_list[3] = 1
|
||||
if not flag_list == [1, 1, 1, 1]:
|
||||
raise Exception('calibration error! ')
|
||||
self._hh_hv_vh_vv_path_list = []
|
||||
return out_dir
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Example 1:
|
||||
# atp = AHVToPolsarpro()
|
||||
# ahv_path = 'D:\\DATA\\GAOFEN3\\2-GF3_MYN_WAV_020086_E107.2_N27.6_20200603_L1A_AHV_L10004843087\\'
|
||||
# # ahv_path = 'D:\\DATA\\GAOFEN3\\2598957_Paris\\'
|
||||
# out_file_path = 'D:\\bintest0923\\'
|
||||
# atp.ahv_to_polsarpro_t3(out_file_path, ahv_path)
|
||||
|
||||
# # Polarimetric decomposition to obtain the T3 matrix
|
||||
# atp = AHVToPolsarpro()
|
||||
# ahv_path = r"I:\MicroWorkspace\product\C-SAR\SoilSalinity\GF3B_MYC_QPSI_003581_E120.6_N31.3_20220729_L1A_AHV_L10000073024_RPC"
|
||||
# t3_path = ahv_path + 'psp_t3\\'
|
||||
# atp.ahv_to_polsarpro_t3(t3_path, ahv_path)
|
||||
|
||||
# Example 2:
|
||||
# dir = r'D:\MicroWorkspace\product\C-SAR\VegetationPhenology\Temporary\preprocessed/'
|
||||
# path_list = [dir +'GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC_HH_preprocessed.tif',
|
||||
# dir +'GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC_HV_preprocessed.tif',
|
||||
# dir +'GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC_VH_preprocessed.tif',
|
||||
# dir +'GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC_VV_preprocessed.tif']
|
||||
#
|
||||
#
|
||||
# atp = AHVToPolsarpro(path_list)
|
||||
# atp.ahv_to_polsarpro_t3(r'D:\MicroWorkspace\product\C-SAR\VegetationPhenology\Temporary\processing\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC/t3')
|
||||
|
||||
print("done")
|
|
@ -1,228 +0,0 @@
|
|||
"""
|
||||
@Project :microproduct
|
||||
@File :AHVToPolsarpro.PY
|
||||
@Function :convert the four polarization channels into S2 matrix files
|
||||
@Author :LMM
|
||||
@Date :2021/10/19 14:39
|
||||
@Version :1.0.0
|
||||
"""
|
||||
import os
|
||||
import numpy as np
|
||||
import glob
|
||||
import struct
|
||||
from tool.algorithm.image.ImageHandle import ImageHandler
|
||||
|
||||
|
||||
class AHVToPolsarproS2:
|
||||
"""
|
||||
Convert full-polarimetric images into bin-format S2 matrices for PolSARpro processing.
|
||||
"""
|
||||
def __init__(self):
|
||||
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def __ahv_to_s2(ahv_dir):
|
||||
"""
|
||||
Convert full-polarimetric images into the S2 scattering matrix.
|
||||
:param ahv_dir: directory containing the full-polarimetric images
|
||||
:return: polarimetric scattering matrix S2 components (s11, s12, s21, s22)
|
||||
"""
|
||||
in_tif_paths = list(glob.glob(os.path.join(ahv_dir, '*.tif')))
|
||||
|
||||
if in_tif_paths == []:
|
||||
in_tif_paths = list(glob.glob(os.path.join(ahv_dir, '*.tiff')))
|
||||
s11, s12, s21, s22 = None,None,None,None
|
||||
flag_list = [0, 0, 0, 0]
|
||||
for in_tif_path in in_tif_paths:
|
||||
|
||||
# Read the original SAR image
|
||||
proj, geotrans, data = ImageHandler.read_img(in_tif_path)
|
||||
|
||||
# Determine the polarization type from the file name
|
||||
if 'HH' in os.path.basename(in_tif_path):
|
||||
data_real = data[0, :, :]  # band 0: real part
|
||||
data_imag = data[1, :, :]  # band 1: imaginary part
|
||||
s11 = data_real + 1j * data_imag
|
||||
flag_list[0] = 1
|
||||
elif 'HV' in os.path.basename(in_tif_path):
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s12 = data_real + 1j * data_imag
|
||||
flag_list[1] = 1
|
||||
elif 'VH' in os.path.basename(in_tif_path):
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s21 = data_real + 1j * data_imag
|
||||
flag_list[2] = 1
|
||||
elif 'VV' in os.path.basename(in_tif_path):
|
||||
data_real = data[0, :, :]
|
||||
data_imag = data[1, :, :]
|
||||
s22 = data_real + 1j * data_imag
|
||||
flag_list[3] = 1
|
||||
else:
|
||||
continue
|
||||
if not flag_list == [1, 1, 1, 1]:
|
||||
raise Exception('tif of HH or HV or VH or VV is not in path: %s' % ahv_dir)
|
||||
return s11, s12, s21, s22
|
||||
|
||||
def __s2_to_bin(self, out_dir, s11, s12, s21, s22):
|
||||
"""
|
||||
Write the S2 matrix as bin files compatible with PolSARpro.
|
||||
:param out_dir: output directory
|
||||
:param s11:
|
||||
:param s12:
|
||||
:param s21:
|
||||
:param s22:
|
||||
:return: S2 bin files plus ENVI header files
|
||||
"""
|
||||
if not os.path.exists(out_dir):
|
||||
os.makedirs(out_dir)
|
||||
|
||||
rows = s11.shape[0]
|
||||
cols = s11.shape[1]
|
||||
bins_dict = {'s11.bin': s11,
|
||||
's12.bin': s12,
|
||||
's21.bin': s21,
|
||||
's22.bin': s22}
|
||||
|
||||
|
||||
for name, data in bins_dict.items():
|
||||
|
||||
bin_path = os.path.join(out_dir, name)
|
||||
self.__write_slc_img_bin(data, bin_path,name)
|
||||
out_hdr_path = bin_path+'.hdr'
|
||||
self.__write_bin_hdr(out_hdr_path, bin_path, rows, cols)
|
||||
|
||||
self.__write_config_file(out_dir, rows, cols)
|
||||
|
||||
@staticmethod
|
||||
def __write_slc_img_bin(im, file_path, name):  # note: name is currently unused
|
||||
"""
|
||||
Write a complex image to a bin file as interleaved float32 (real, imag).
|
||||
:param im: complex image matrix (single-band)
|
||||
:param file_path: full path of the bin file
|
||||
"""
|
||||
with open(file_path, 'wb') as f:
|
||||
rows = im.shape[0]
|
||||
cols = im.shape[1]
|
||||
cre_im = np.zeros((rows, 2*cols), dtype=float)
|
||||
cre_im[:, ::2] = im.real  # store real parts
|
||||
cre_im[:, 1::2] = im.imag  # store imaginary parts
|
||||
for row in range(rows):
|
||||
cre_im_bin = struct.pack("f" * 2*cols, *np.reshape(cre_im[row, :], (2*cols, 1), order='F'))
|
||||
f.write(cre_im_bin)
|
||||
|
||||
|
||||
@staticmethod
|
||||
def read_slc_bin_to_img(bin_path):
|
||||
"""
|
||||
Read bin-format binary data and return it as a matrix.
|
||||
:param bin_path: path of the .bin file (a config.txt must sit next to it)
|
||||
:return: image array of shape (2, rows, cols) holding real and imaginary parts
|
||||
"""
|
||||
(bin_dir, bin_name) = os.path.split(bin_path)
|
||||
config_path = os.path.join(bin_dir, 'config.txt')
|
||||
config = open(config_path, 'r').read().split('\n', -1)
|
||||
rows = int(config[1])
|
||||
cols = int(config[4])
|
||||
|
||||
bin_file = open(bin_path, 'rb')  # open the binary file
|
||||
size = os.path.getsize(bin_path)  # file size in bytes
|
||||
if size < rows * cols * 4 * 2:
|
||||
raise Exception(
|
||||
'bin size less than rows*cols*4*2! size:',
|
||||
size,
|
||||
'byte, rows:',
|
||||
rows,
|
||||
'cols:',
|
||||
cols)
|
||||
|
||||
bin_data = np.zeros([rows, cols*2], dtype=np.float32)
|
||||
img_array = np.zeros([2,rows, cols], dtype=np.float32)
|
||||
for row in range(rows):
|
||||
data = bin_file.read(4 * cols * 2)  # read one row of binary data at a time
|
||||
row_data = struct.unpack('f' * cols * 2, data)  # unpack into one row of floats
|
||||
bin_data[row, :] = row_data
|
||||
bin_file.close()
|
||||
img_array[0] = bin_data[:, ::2] # real
|
||||
img_array[1] = bin_data[:, 1::2] # imag
|
||||
return img_array
|
||||
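A vectorized sketch of the same read, assuming the interleaved float32 layout written by __write_slc_img_bin:

data = np.fromfile(bin_path, dtype=np.float32, count=rows * cols * 2).reshape(rows, cols * 2)
img_array = np.stack([data[:, ::2], data[:, 1::2]])  # band 0: real, band 1: imag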
|
||||
|
||||
@staticmethod
|
||||
def __write_bin_hdr(out_hdr_path, bin_path, rows, cols):
|
||||
"""
|
||||
写入影像的头文件
|
||||
:param out_hdr_path : 头文件的路径
|
||||
:param bin_path: bin文件的路径
|
||||
:param rows: 影像的行数
|
||||
:param cols: 影像的列数
|
||||
"""
|
||||
h1 = 'ENVI'
|
||||
h2 = 'description = {'
|
||||
h3 = 'ENVI File, Created [] }'
|
||||
h4 = 'samples = ' + str(cols)  # columns
|
||||
h5 = 'lines = ' + str(rows)  # rows
|
||||
h6 = 'bands = 1 '  # number of bands
|
||||
h7 = 'header offset = 0'
|
||||
h8 = 'file type = ENVI Standard'
|
||||
h9 = 'data type = 6'  # data type (6 = complex float32)
|
||||
h10 = 'interleave = bsq'  # interleave / storage order
|
||||
h11 = 'sensor type = Unknown'
|
||||
h12 = 'byte order = 0'
|
||||
h13 = 'wavelength units = Unknown'
|
||||
h14 = 'complex function = Power'
|
||||
h = [h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11, h12, h13, h14]
|
||||
doc = open(out_hdr_path, 'w')
|
||||
for i in range(0, 14):
|
||||
print(h[i], end='', file=doc)
|
||||
print('\n', end='', file=doc)
|
||||
doc.close()
|
||||
|
||||
@staticmethod
|
||||
def __write_config_file(out_config_dir, rows, cols):
|
||||
"""
|
||||
Write the PolSARpro config file.
|
||||
:param out_config_dir: directory to write config.txt into
|
||||
:param rows: number of image rows
|
||||
:param cols: number of image columns
|
||||
"""
|
||||
h1 = 'Nrow'
|
||||
h2 = str(rows)
|
||||
h3 = '---------'
|
||||
h4 = 'Ncol'
|
||||
h5 = str(cols)
|
||||
h6 = '---------'
|
||||
h7 = 'PolarCase'
|
||||
# h8 = 'monostatic'
|
||||
h8 = 'bistatic'
|
||||
h9 = '---------'
|
||||
h10 = 'PolarType'
|
||||
h11 = 'full'
|
||||
h = [h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11]
|
||||
|
||||
out_config_path = os.path.join(out_config_dir, 'config.txt')
|
||||
doc = open(out_config_path, 'w')
|
||||
for i in range(0, 11):
|
||||
print(h[i], end='', file=doc)
|
||||
print('\n', end='', file=doc)
|
||||
doc.close()
|
||||
|
||||
def api_ahv_to_polsarpro_s2(self, out_file_dir, in_ahv_dir):
|
||||
|
||||
s11, s12, s21, s22 = self.__ahv_to_s2(in_ahv_dir)
|
||||
|
||||
self.__s2_to_bin(out_file_dir, s11, s12, s21, s22)
|
||||
|
||||
|
||||
# if __name__ == '__main__':
|
||||
# # test()
|
||||
# atp = AHVToPolsarproS2()
|
||||
# ahv_path = r'D:\DATA\GAOFEN3\2-GF3_MYN_WAV_020086_E107.2_N27.6_20200603_L1A_AHV_L10004843087'
|
||||
# # ahv_path = 'D:\\DATA\\GAOFEN3\\2598957_Paris\\'
|
||||
# out_file_path = r'D:\DATA\GAOFEN3\2-GF3_MYN_WAV_020086_E107.2_N27.6_20200603_L1A_AHV_L10004843087\SLC_SHJ_2'
|
||||
# atp.api_ahv_to_polsarpro_s2(out_file_path, ahv_path)
|
||||
# bin_path = r'D:\DATA\GAOFEN3\2-GF3_MYN_WAV_020086_E107.2_N27.6_20200603_L1A_AHV_L10004843087\SLC_SHJ\s11.bin'
|
||||
# # data = atp.read_slc_bin_to_img(bin_path)
|
||||
# print("done")
|
|
@ -1,196 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project:__init__.py
|
||||
@File:DualPolarToPolsarproC2.py
|
||||
@Function:convert dual-polarization images into PolSARpro-format C2 data
|
||||
@Contact:
|
||||
@Author:SHJ
|
||||
@Date:2021/11/5
|
||||
@Version:1.0.0
|
||||
"""
|
||||
import os
|
||||
import numpy as np
|
||||
import glob
|
||||
import struct
|
||||
import gc
|
||||
from tool.algorithm.image.ImageHandle import ImageHandler
|
||||
|
||||
|
||||
class DualPolarToPolsarproC2:
|
||||
"""
|
||||
Convert dual-polarization images into bin-format C2 matrices for PolSARpro processing.
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
pass
|
||||
|
||||
@staticmethod
|
||||
def __dual_polar_to_c2(dual_polar_dir):
|
||||
"""
|
||||
Convert a dual-polarization image pair into the C2 covariance matrix.
|
||||
:param dual_polar_dir: directory containing the dual-polarization images
|
||||
:return: C2 matrix components (c11, c12, c22)
|
||||
"""
|
||||
in_tif_paths = list(glob.glob(os.path.join(dual_polar_dir, '*.tif')))
|
||||
|
||||
if in_tif_paths == []:
|
||||
in_tif_paths = list(glob.glob(os.path.join(dual_polar_dir, '*.tiff')))
|
||||
s11, s22 = None, None
|
||||
flag_list = [0, 0, 0, 0]
|
||||
for in_tif_path in in_tif_paths:
|
||||
# Read the original SAR image
|
||||
proj, geotrans, data = ImageHandler.read_img(in_tif_path)
|
||||
# Determine the polarization type from the file name
|
||||
if 'HH' in os.path.basename(in_tif_path):
|
||||
s11 = data[0, :, :] + 1j * data[1, :, :]
|
||||
flag_list[0] = 1
|
||||
elif 'HV' in os.path.basename(in_tif_path):
|
||||
s22 = data[0, :, :] + 1j * data[1, :, :]
|
||||
flag_list[1] = 1
|
||||
elif 'VH' in os.path.basename(in_tif_path):
|
||||
s22 = data[0, :, :] + 1j * data[1, :, :]
|
||||
flag_list[2] = 1
|
||||
elif 'VV' in os.path.basename(in_tif_path):
|
||||
s11 = data[0, :, :] + 1j * data[1, :, :]
|
||||
flag_list[3] = 1
|
||||
else:
|
||||
continue
|
||||
del data
|
||||
gc.collect()
|
||||
|
||||
if flag_list != [1, 1, 0, 0] and flag_list != [0, 0, 1, 1] :
|
||||
raise Exception('Dual-Polarization SAR is not in path: %s' % in_tif_path)
|
||||
|
||||
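# The 2x2 covariance matrix of the pair is C2 = [[|s11|^2, s11*conj(s22)], [conj(c12), |s22|^2]];
# only c11, c12 and c22 are computed because c21 is simply the conjugate of c12.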
c11 = np.abs(s11) ** 2
|
||||
c12 = s11 * np.conj(s22)
|
||||
del s11
|
||||
gc.collect()
|
||||
c22 = np.abs(s22)**2
|
||||
return c11, c12, c22
|
||||
|
||||
def __c2_to_polsarpro_c2(self, out_dir, c11, c12, c22):
|
||||
"""
|
||||
Write the C2 matrix as bin files compatible with PolSARpro.
|
||||
:param out_dir: output directory
|
||||
:param c11:
|
||||
:param c12:
|
||||
:param c22:
|
||||
:return: C2 bin files plus ENVI header files
|
||||
"""
|
||||
if not os.path.exists(out_dir):
|
||||
os.makedirs(out_dir)
|
||||
|
||||
rows = c11.shape[0]
|
||||
cols = c11.shape[1]
|
||||
bins_dict = {
|
||||
'C11.bin': c11,
|
||||
'C12_real.bin': c12.real,
|
||||
'C12_imag.bin': c12.imag,
|
||||
'C22.bin': c22}
|
||||
|
||||
for name, data in bins_dict.items():
|
||||
bin_path = os.path.join(out_dir, name)
|
||||
self.__write_img_bin(data, bin_path)
|
||||
out_hdr_path = bin_path + '.hdr'
|
||||
self.__write_bin_hdr(out_hdr_path, bin_path, rows, cols)
|
||||
|
||||
self.__write_config_file(out_dir, rows, cols)
|
||||
|
||||
def rows(self):
|
||||
"""获取影像行数"""
|
||||
return self._rows
|
||||
|
||||
def cols(self):
|
||||
"""获取影像列数"""
|
||||
return self._cols
|
||||
|
||||
def __write_img_bin(self, im, file_path):
|
||||
"""
|
||||
Write an image to a bin file as float32.
|
||||
:param im: image matrix (only single-band data is supported for now)
|
||||
:param file_path: full path of the bin file
|
||||
"""
|
||||
with open(file_path, 'wb') as f:
|
||||
self._rows = im.shape[0]
|
||||
self._cols = im.shape[1]
|
||||
for row in range(self._rows):
|
||||
im_bin = struct.pack("f" * self._cols, *np.reshape(im[row, :], (self._cols, 1), order='F'))
|
||||
f.write(im_bin)
|
||||
f.close()
|
||||
|
||||
@staticmethod
|
||||
def __write_bin_hdr(out_hdr_path, bin_path, rows, cols):
|
||||
"""
|
||||
写入影像的头文件
|
||||
:param out_hdr_path : 头文件的路径
|
||||
:param bin_path: bin文件的路径
|
||||
:param rows: 影像的行数
|
||||
:param cols: 影像的列数
|
||||
"""
|
||||
name = os.path.split(bin_path)[1]
|
||||
h1 = 'ENVI'
|
||||
h2 = 'description = {'
|
||||
h3 = 'File Imported into ENVI. }'
|
||||
h4 = 'samples = ' + str(cols)  # columns
|
||||
h5 = 'lines = ' + str(rows)  # rows
|
||||
h6 = 'bands = 1 '  # number of bands
|
||||
h7 = 'header offset = 0'
|
||||
h8 = 'file type = ENVI Standard'
|
||||
h9 = 'data type = 4'  # data type (4 = float32)
|
||||
h10 = 'interleave = bsq'  # interleave / storage order
|
||||
h11 = 'sensor type = Unknown'
|
||||
h12 = 'byte order = 0'
|
||||
h13 = 'band names = {'
|
||||
h14 = name + '}'
|
||||
h = [h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11, h12, h13, h14]
|
||||
doc = open(out_hdr_path, 'w')
|
||||
for i in range(0, 14):
|
||||
print(h[i], end='', file=doc)
|
||||
print('\n', end='', file=doc)
|
||||
doc.close()
|
||||
|
||||
@staticmethod
|
||||
def __write_config_file(out_config_dir, rows, cols):
|
||||
"""
|
||||
Write the PolSARpro config file.
|
||||
:param out_config_dir: directory to write config.txt into
|
||||
:param rows: number of image rows
|
||||
:param cols: number of image columns
|
||||
"""
|
||||
h1 = 'Nrow'
|
||||
h2 = str(rows)
|
||||
h3 = '---------'
|
||||
h4 = 'Ncol'
|
||||
h5 = str(cols)
|
||||
h6 = '---------'
|
||||
h7 = 'PolarCase'
|
||||
h8 = 'monostatic'
|
||||
h9 = '---------'
|
||||
h10 = 'PolarType'
|
||||
h11 = 'pp1'
|
||||
h = [h1, h2, h3, h4, h5, h6, h7, h8, h9, h10, h11]
|
||||
|
||||
out_config_path = os.path.join(out_config_dir, 'config.txt')
|
||||
doc = open(out_config_path, 'w')
|
||||
for i in range(0, 11):
|
||||
print(h[i], end='', file=doc)
|
||||
print('\n', end='', file=doc)
|
||||
doc.close()
|
||||
|
||||
|
||||
def api_dual_polar__to_polsarpro_c2(self, out_file_dir, dual_polar_dir):
|
||||
c11, c12, c22 = self.__dual_polar_to_c2(dual_polar_dir)
|
||||
self.__c2_to_polsarpro_c2(out_file_dir, c11, c12, c22)
|
||||
|
||||
|
||||
# if __name__ == '__main__':
|
||||
# tp = DualPolarToPolsarproC2()
|
||||
# out_dic = 'E:\\3-GF3_KAS_FSI_020253_E110.8_N25.5_20200614_L1A_HHHV_L10004871459\\SLC_SHJ1'
|
||||
# in_dic = 'E:\\3-GF3_KAS_FSI_020253_E110.8_N25.5_20200614_L1A_HHHV_L10004871459\\'
|
||||
# # out_file_path = 'D:\\bintest0923\\'
|
||||
# tp.api_dual_polar__to_polsarpro_c2(out_dic,in_dic)
|
||||
# # atp.ahv_to_polsarpro_t3(out_file_path, ahv_path)
|
||||
#
|
||||
# print("done")
|
|
@ -1,97 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project :onestar
|
||||
@File :GLDM.py
|
||||
@Contact:
|
||||
Computing image features with scikit-image feature: https://blog.csdn.net/lyxleft/article/details/102904909
|
||||
How to convolve over a 2-D image in Python: https://www.xz577.com/j/281686.html
|
||||
Computing the gray-level co-occurrence matrix with Python's skimage: https://zhuanlan.zhihu.com/p/147066037
|
||||
@function :compute gray-level co-occurrence matrix (GLCM) texture features of an image
|
||||
@Author :SHJ
|
||||
@Date :2021/11/10 14:42
|
||||
@Version :1.0.0
|
||||
"""
|
||||
import numpy as np
|
||||
import os
|
||||
from skimage.feature import greycomatrix, greycoprops
|
||||
import datetime
|
||||
from tool.algorithm.image.ImageHandle import ImageHandler
|
||||
|
||||
class GLDM:
|
||||
def __init__(self,win_size = 15, step=2,levels=16,angles=[0,45,90,135],
|
||||
prop=['contrast', 'dissimilarity', 'homogeneity', 'energy', 'correlation', 'ASM']):
|
||||
self._win_size = win_size  # GLCM window size, must be odd
|
||||
self._step = step  # pixel-pair distance
|
||||
self._levels = levels  # number of gray levels, e.g. 16 or 256
|
||||
self._angles = list(np.deg2rad(np.array(angles)))  # angles, in radians
|
||||
"""
|
||||
'contrast': reflects image sharpness and the depth of texture grooves
|
||||
'dissimilarity': dissimilarity of pixel pairs
|
||||
'homogeneity': measures local texture variation; large values mean little variation between local regions, i.e. a locally very uniform texture
|
||||
'energy': sum of squared GLCM elements; reflects the uniformity of the gray-level distribution and texture coarseness
|
||||
'correlation': measures the similarity of GLCM elements along the row or column direction
|
||||
'ASM': angular second moment
|
||||
"""
|
||||
self._prop = prop  # texture feature names
|
||||
|
||||
def get_glcm_value(self,input):
|
||||
values_temp = []
|
||||
# compute the GLCM
|
||||
# co-occurrence matrix arguments: image matrix, distances, angles, gray levels, symmetric, normed
|
||||
# para2: [0, np.pi / 4, np.pi / 2, np.pi * 3 / 4] evaluates all four directions; a single direction may also be chosen
|
||||
glcm = greycomatrix(input, [self._step], self._angles, self._levels, symmetric=False, normed=True)
|
||||
# print(glcm.shape)
|
||||
# loop over the requested texture properties
|
||||
for prop in self._prop:
|
||||
temp = greycoprops(glcm, prop)
|
||||
# print(temp)
|
||||
values_temp.append(np.mean(temp))
|
||||
return values_temp
|
||||
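As a quick check of the conventions used here (integer image, distance list, angles in radians, gray levels), the standard skimage toy example, with hypothetical values:

import numpy as np
from skimage.feature import greycomatrix, greycoprops

img = np.array([[0, 0, 1, 1],
                [0, 0, 1, 1],
                [0, 2, 2, 2],
                [2, 2, 3, 3]], dtype=np.uint8)
glcm = greycomatrix(img, [1], [0], levels=4, symmetric=False, normed=True)
print(greycoprops(glcm, 'contrast'))  # one value per (distance, angle) pair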
|
||||
|
||||
def get_glcm_array(self,inputs: np.ndarray, win_size):
|
||||
h, w = inputs.shape
|
||||
pad = (win_size - 1) // 2
|
||||
inputs = np.pad(inputs, pad_width=[(pad, pad), (pad, pad)], mode="constant", constant_values=0)
|
||||
glcm_array ={}
|
||||
for name in self._prop:
|
||||
glcm_array.update({name:np.zeros(shape=(h, w),dtype=np.float32)})
|
||||
|
||||
for i in range(h):  # row index
|
||||
for j in range(w):  # column index
|
||||
window = inputs[i: i + win_size, j: j + win_size]
|
||||
value = self.get_glcm_value(window)
|
||||
print('i:%s,j:%s' % (i, j))
|
||||
# print(value)
|
||||
for n, array in enumerate(glcm_array.values()):
|
||||
array[i,j] = value[n]
|
||||
return glcm_array
|
||||
|
||||
@staticmethod
|
||||
def standardization(data, num=1):
|
||||
# normalize the matrix to [0, num]
|
||||
data[np.isnan(data)] = np.nanmin(data)  # replace NaN with the minimum valid value
|
||||
_range = np.max(data) - np.min(data)
|
||||
return (data - np.min(data)) / _range * num
|
||||
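For example, standardization(np.array([2.0, 4.0, 6.0]), num=15) returns [0.0, 7.5, 15.0], mapping the data range onto the gray levels used for the GLCM.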
|
||||
def api_get_glcm_array(self,out_dir,in_tif_path,name=''):
|
||||
|
||||
ih = ImageHandler()
|
||||
proj, geotrans, array = ih.read_img(in_tif_path)
|
||||
array[np.where(array > 500000)] = 500000  # clip extreme values so normalization does not squeeze most values toward 0
|
||||
array = self.standardization(array, self._levels - 1)  # normalize to 0..(levels - 1)
|
||||
array = np.uint8(array)
|
||||
glcm_array = self.get_glcm_array(array, self._win_size)
|
||||
for key,value in glcm_array.items():
|
||||
out_path = os.path.join(out_dir,name+'_'+key+'.tif')
|
||||
ih.write_img(out_path, proj, geotrans,value)
|
||||
|
||||
if __name__ == '__main__':
|
||||
start = datetime.datetime.now()
|
||||
gldm = GLDM(win_size=9,levels=16,step=3,angles=[0,45,90,135])
|
||||
gldm.api_get_glcm_array(r'D:\glcm', r'D:\glcm\src_img.tif')
|
||||
end = datetime.datetime.now()
|
||||
msg = 'running use time: %s ' % (end - start)
|
||||
print(msg)
|
||||
|
||||
# processing a 666*720 image took running use time: 0:04:23.155424
|
|
@ -1,85 +0,0 @@
|
|||
import os
|
||||
import glob
|
||||
import numpy as np
|
||||
import struct
|
||||
from PIL import Image
|
||||
from tool.algorithm.ml.machineLearning import MachineLeaning as ml
|
||||
|
||||
|
||||
def read_bin_to_img(bin_path):
|
||||
"""
|
||||
Read bin-format binary data and return it as a matrix.
|
||||
:param bin_path: path of the .bin file (a config.txt must sit next to it)
|
||||
:return: image matrix
|
||||
"""
|
||||
(bin_dir, bin_name) = os.path.split(bin_path)
|
||||
config_path = os.path.join(bin_dir, 'config.txt')
|
||||
config = open(config_path, 'r').read().split('\n', -1)
|
||||
rows = int(config[1])
|
||||
cols = int(config[4])
|
||||
|
||||
bin_file = open(bin_path, 'rb')  # open the binary file
|
||||
size = os.path.getsize(bin_path)  # file size in bytes
|
||||
if size < rows * cols * 4:
|
||||
raise Exception(
|
||||
'bin size less than rows*cols*4! size:',
|
||||
size,
|
||||
'byte, rows:',
|
||||
rows,
|
||||
'cols:',
|
||||
cols)
|
||||
|
||||
img = np.zeros([rows, cols], dtype=np.float32)
|
||||
for row in range(rows):
|
||||
data = bin_file.read(4 * cols)  # read one row of binary data at a time
|
||||
row_data = struct.unpack('f' * cols, data)  # unpack into one row of floats
|
||||
img[row, :] = row_data
|
||||
bin_file.close()
|
||||
return img
|
||||
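A vectorized equivalent, assuming the row-major float32 layout validated above:

img = np.fromfile(bin_path, dtype=np.float32, count=rows * cols).reshape(rows, cols)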
|
||||
def write_bin_to_tif(out_tif_dir, bin_dir):
|
||||
"""
|
||||
Read H-A-Alpha decomposition binaries and write each band out as a tif.
|
||||
:param out_tif_dir: output directory for the tifs
|
||||
:param bin_dir: directory of the binary data (.bin plus config.txt)
|
||||
:return out_tif_path: dict mapping band name to generated tif path
|
||||
"""
|
||||
bin_paths = list(glob.glob(os.path.join(bin_dir, '*.bin')))
|
||||
out_tif_path = {}
|
||||
for in_path in bin_paths:
|
||||
name = os.path.split(in_path)[1].split('.')[0]
|
||||
out_path = os.path.join(out_tif_dir, name + '.tif')
|
||||
out_tif_path.update({name: out_path})
|
||||
if os.path.exists(os.path.split(out_path)[0]) is False:
|
||||
os.makedirs(os.path.split(out_path)[0])
|
||||
img_array = read_bin_to_img(in_path)
|
||||
img_array[np.isnan(img_array)] = 0  # fill NaN values with 0
|
||||
img_array = ml.standardization(img_array)  # normalize the data to [0, 1]
|
||||
out_image = Image.fromarray(img_array)
|
||||
out_image.save(out_path)
|
||||
return out_tif_path
|
||||
|
||||
def write_bin_to_tif_soil(out_tif_dir, bin_dir):
|
||||
"""
|
||||
Read H-A-Alpha decomposition binaries and write each band out as a tif (without normalization).
|
||||
:param out_tif_dir: output directory for the tifs
|
||||
:param bin_dir: directory of the binary data (.bin plus config.txt)
|
||||
:return out_tif_path: dict mapping band name to generated tif path
|
||||
"""
|
||||
bin_paths = list(glob.glob(os.path.join(bin_dir, '*.bin')))
|
||||
out_tif_path = {}
|
||||
for in_path in bin_paths:
|
||||
name = os.path.split(in_path)[1].split('.')[0]
|
||||
out_path = os.path.join(out_tif_dir, name + '.tif')
|
||||
out_tif_path.update({name: out_path})
|
||||
if os.path.exists(os.path.split(out_path)[0]) is False:
|
||||
os.makedirs(os.path.split(out_path)[0])
|
||||
img_array = read_bin_to_img(in_path)
|
||||
img_array[np.isnan(img_array)] = 0  # fill NaN values with 0
|
||||
# img_array = ml.standardization(img_array)  # normalize to [0, 1]; left disabled here
|
||||
out_image = Image.fromarray(img_array)
|
||||
out_image.save(out_path)
|
||||
return out_tif_path
|
||||
|
||||
|
||||
|
|
@ -1,190 +0,0 @@
|
|||
from tool.algorithm.algtools.MetaDataHandler import Calibration
|
||||
from tool.algorithm.polsarpro.AHVToPolsarpro import AHVToPolsarpro
|
||||
from tool.algorithm.polsarpro.pspLeeRefinedFilterT3 import LeeRefinedFilterT3
|
||||
from tool.algorithm.polsarpro.pspCloudePottierDecomposition import PspCloudePottierDecomposition
|
||||
from tool.algorithm.polsarpro.pspFreemanDecomposition import PspFreemanDecomposition
|
||||
from tool.algorithm.polsarpro.pspYamaguchiDecomposition import PspYamaguchiDecomposition
|
||||
from tool.algorithm.polsarpro.pspTouziDecomposition import PspTouziDecomposition
|
||||
from tool.algorithm.polsarpro.bin2tif import write_bin_to_tif
|
||||
from tool.algorithm.polsarpro.pspHAAlphaDecomposition import PspHAAlphaDecomposition
|
||||
from tool.algorithm.xml.AlgXmlHandle import InitPara
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import glob
|
||||
logger = logging.getLogger("mylog")
|
||||
|
||||
class CreateFeature:
|
||||
"""
|
||||
Feature generation
|
||||
"""
|
||||
def __init__(self, debug = False, exe_dir = ''):
|
||||
self._debug = debug
|
||||
self._exe_dir = exe_dir
|
||||
pass
|
||||
|
||||
|
||||
def ahv_to_t3(self, workspace_processing_path, workspace_preprocessing_path, hh_hv_vh_vv_list, name='',FILTER_SIZE=3):
|
||||
# convert full-polarization tifs into bin-format T3 data
|
||||
atp = AHVToPolsarpro(hh_hv_vh_vv_list)
|
||||
lee_filter_path = os.path.join(workspace_processing_path, name, 'lee_filter\\') # workspace_processing_path + name + '\\lee_filter\\'
|
||||
if not self._debug:
|
||||
|
||||
t3_path = os.path.join(workspace_processing_path, name, 'psp_t3\\') # workspace_processing_path + name + '\\psp_t3\\'
|
||||
# atp.ahv_to_polsarpro_t3(t3_path, tif_path)
|
||||
|
||||
polarization = ['HH', 'HV', 'VH', 'VV']
|
||||
if os.path.exists(workspace_preprocessing_path + name + '\\'):
|
||||
meta_xml_paths = list(glob.glob(os.path.join(workspace_preprocessing_path + name, '*.meta.xml')))
|
||||
meta_dic = InitPara.get_meta_dic_new(meta_xml_paths, name)
|
||||
calibration = Calibration.get_Calibration_coefficient(meta_dic['Origin_META'], polarization)
|
||||
tif_path = atp.calibration(calibration, workspace_preprocessing_path, name)
|
||||
atp.ahv_to_polsarpro_t3_veg(t3_path, tif_path)
|
||||
|
||||
# Lee filtering
|
||||
leeFilter = LeeRefinedFilterT3()
|
||||
leeFilter.api_lee_refined_filter_T3('', t3_path, lee_filter_path, 0, 0, atp.rows(), atp.cols(), FILTER_SIZE)
|
||||
logger.info("refine_lee filter success!")
|
||||
return lee_filter_path
|
||||
|
||||
def decompose(self,workspace_processing_path, name, t3_path, rows, cols, hh_hv_vh_vv_dic={},FeatureInput=['Freeman', 'Yamaguchi', 'Cloude']): # , 'Touzi'
|
||||
"""
|
||||
Polarimetric decompositions: Freeman, Touzi, Yamaguchi, Cloude.
|
||||
:param t3_path: T3 file directory
|
||||
:param rows: number of image rows
|
||||
:param cols: number of image columns
|
||||
"""
|
||||
# build the feature combinations
|
||||
exeDir = self._exe_dir
|
||||
outFolderDic = {}
|
||||
if 'Freeman' in FeatureInput:
|
||||
# Freeman decomposition
|
||||
freemanOutDir = os.path.join(workspace_processing_path, name + '\\freeman\\')
|
||||
if not self._debug:
|
||||
freemDecom = PspFreemanDecomposition(exeDir, t3_path, freemanOutDir)
|
||||
flag = freemDecom.api_freeman_decomposition_T3(0, 0, rows, cols)
|
||||
if not flag:
|
||||
logger.error('FreemanDecomposition err')
|
||||
return False, None
|
||||
outFolderDic['Freeman'] = freemanOutDir
|
||||
|
||||
# Touzi decomposition
|
||||
if 'Touzi' in FeatureInput:
|
||||
|
||||
touziOutDir = os.path.join(workspace_processing_path, name + '\\touzi\\')
|
||||
if not os.path.exists(touziOutDir):
|
||||
os.makedirs(touziOutDir)
|
||||
if not self._debug:
|
||||
# Touzi decomposition is slow and contributes relatively little to the feature set
|
||||
p = PspTouziDecomposition(hh_hv_vh_vv_dic, touziOutDir)
|
||||
p.Touzi_decomposition_multiprocessing()
|
||||
outFolderDic['Touzi'] = touziOutDir
|
||||
|
||||
if 'Yamaguchi' in FeatureInput:
|
||||
# Yamaguchi decomposition
|
||||
yamaguchiOutDir = os.path.join(workspace_processing_path, name + '\\yamaguchi\\')
|
||||
if not self._debug:
|
||||
yamaguchiDecom = PspYamaguchiDecomposition(exeDir, t3_path, yamaguchiOutDir)
|
||||
flag = yamaguchiDecom.api_yamaguchi_4components_decomposition_T3(0, 0, rows, cols)
|
||||
if not flag:
|
||||
logger.error('YamaguchiDecomposition err')
|
||||
return False, None
|
||||
outFolderDic['Yamaguchi'] = yamaguchiOutDir
|
||||
|
||||
if 'Cloude' in FeatureInput:
|
||||
# Cloude-Pottier decomposition
|
||||
cloudeOutDir = os.path.join(workspace_processing_path, name + '\\cloude\\')
|
||||
if not self._debug:
|
||||
cloudeDecom = PspCloudePottierDecomposition(
|
||||
exeDir, t3_path, cloudeOutDir)
|
||||
flag = cloudeDecom.api_h_a_alpha_decomposition_T3(
|
||||
0, 0, rows, cols)
|
||||
if not flag:
|
||||
logger.error('CloudePottierDecomposition err')
|
||||
return False, None
|
||||
outFolderDic['Cloude'] = cloudeOutDir
|
||||
return True, outFolderDic
|
||||
|
||||
def creat_h_a_alpha_features(self, t3_path, out_dir):
|
||||
logger.info('ahv transform to polsarpro T3 matrix success!')
|
||||
logger.info('progress bar: 20%')
|
||||
h_a_alpha_decomposition_T3_path = os.path.join(self._exe_dir, 'h_a_alpha_decomposition_T3.exe')
|
||||
h_a_alpha_eigenvalue_set_T3_path = os.path.join(self._exe_dir, 'h_a_alpha_eigenvalue_set_T3.exe')
|
||||
h_a_alpha_eigenvector_set_T3_path = os.path.join(self._exe_dir, 'h_a_alpha_eigenvector_set_T3.exe')
|
||||
|
||||
if not self._debug:
|
||||
haa = PspHAAlphaDecomposition(normalization=True)
|
||||
haa.api_creat_h_a_alpha_features(h_a_alpha_out_dir=out_dir,
|
||||
h_a_alpha_decomposition_T3_path=h_a_alpha_decomposition_T3_path ,
|
||||
h_a_alpha_eigenvalue_set_T3_path=h_a_alpha_eigenvalue_set_T3_path ,
|
||||
h_a_alpha_eigenvector_set_T3_path=h_a_alpha_eigenvector_set_T3_path,
|
||||
polsarpro_in_dir=t3_path)
|
||||
|
||||
|
||||
def cereat_features_dic(self,outFolderDic, feature_tif_dir):
|
||||
|
||||
if not os.path.exists(feature_tif_dir):
|
||||
os.makedirs(feature_tif_dir)
|
||||
|
||||
feature_tif_paths = {}
|
||||
for key in outFolderDic:
|
||||
feature_bin_dic = outFolderDic[key]
|
||||
if key == 'Touzi':
|
||||
for path in list(glob.glob(os.path.join(feature_bin_dic, '*.tif'))):
|
||||
name = os.path.split(path)[1].split('.')[0]
|
||||
if not self._debug:
|
||||
shutil.copyfile(path, os.path.join(feature_tif_dir, name + '.tif')) # feature_tif_dir + '\\' + name + '.tif')
|
||||
feature_tif_paths.update({name: os.path.join(feature_tif_dir, name + '.tif')}) # feature_tif_dir + '\\' + name + '.tif'
|
||||
else:
|
||||
feature_tif_paths.update(write_bin_to_tif(feature_tif_dir, feature_bin_dic))
|
||||
return feature_tif_paths
|
||||
|
||||
@staticmethod
|
||||
def decompose_single_tar(hh_hv_vh_vv_list, workspace_processing_path, workspace_preprocessing_path, name, exe_dir, rows, cols, FILTER_SIZE = 3, debug =False, FeatureInput=['Freeman', 'Yamaguchi', 'Cloude']):
|
||||
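# Single-scene pipeline: derive H-A-Alpha features, calibrate and build the Lee-filtered T3 matrix,
# run the polarimetric decompositions, then gather all feature tifs into one directory.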
hh_hv_vh_vv_dic = {'HH': hh_hv_vh_vv_list[0],
|
||||
'HV': hh_hv_vh_vv_list[1],
|
||||
'VH': hh_hv_vh_vv_list[2],
|
||||
'VV': hh_hv_vh_vv_list[3]}
|
||||
t3_path = os.path.join(workspace_processing_path, name, "lee_filter") # workspace_processing_path + name + "\\lee_filter"
|
||||
feature_tif_dir = os.path.join(workspace_processing_path, name, 'features') # workspace_processing_path + name + "\\features"
|
||||
|
||||
cfeature = CreateFeature(debug, exe_dir)
|
||||
|
||||
cfeature.creat_h_a_alpha_features(t3_path, feature_tif_dir)
|
||||
|
||||
t3_path = cfeature.ahv_to_t3(workspace_processing_path, workspace_preprocessing_path, hh_hv_vh_vv_list, name, FILTER_SIZE)
|
||||
flag, outFolderDic = cfeature.decompose(workspace_processing_path, name, t3_path, rows, cols, hh_hv_vh_vv_dic, FeatureInput) # , 'Touzi'
|
||||
cfeature.cereat_features_dic(outFolderDic, feature_tif_dir)
|
||||
return feature_tif_dir
|
||||
|
||||
if __name__ == '__main__':
|
||||
# # Example 1:
|
||||
# exe_dir = os.getcwd()
|
||||
# dir = r'D:\MicroWorkspace\product\C-SAR\VegetationPhenology\Temporary\preprocessed/'
|
||||
# hh_hv_vh_vv_list = [dir +'GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC_HH_preprocessed.tif',
|
||||
# dir +'GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC_HV_preprocessed.tif',
|
||||
# dir +'GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC_VH_preprocessed.tif',
|
||||
# dir +'GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC_VV_preprocessed.tif']
|
||||
#
|
||||
# workspace_processing_path= r"D:\MicroWorkspace\product\C-SAR\VegetationPhenology\Temporary\processing/"
|
||||
# name= 'GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC'
|
||||
# hh_hv_vh_vv_dic = {}
|
||||
# hh_hv_vh_vv_dic.update({'HH': hh_hv_vh_vv_list[0]})
|
||||
# hh_hv_vh_vv_dic.update({'HV': hh_hv_vh_vv_list[1]})
|
||||
# hh_hv_vh_vv_dic.update({'VH': hh_hv_vh_vv_list[2]})
|
||||
# hh_hv_vh_vv_dic.update({'VV': hh_hv_vh_vv_list[3]})
|
||||
# t3_path = workspace_processing_path + name + "\\lee_filter"
|
||||
# feature_tif_dir = workspace_processing_path + name + "\\features"
|
||||
#
|
||||
# cfeature = CreateFeature(False, exe_dir)
|
||||
#
|
||||
# cfeature.creat_h_a_alpha_features(t3_path, feature_tif_dir)
|
||||
#
|
||||
# t3_path = cfeature.ahv_to_t3(workspace_processing_path, hh_hv_vh_vv_list, name, 3)
|
||||
# flag, outFolderDic = cfeature.decompose(workspace_processing_path, name, t3_path, 997, 1227, hh_hv_vh_vv_dic, FeatureInput=['Freeman', 'Touzi', 'Yamaguchi', 'Cloude'])
|
||||
#
|
||||
# feature_tifs_dic = cfeature.cereat_features_dic(outFolderDic, feature_tif_dir)
|
||||
pass
|
File diff suppressed because it is too large
|
@ -1,132 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project:__init__.py
|
||||
@File:pspCloudePottierDecomposition.py  Cloude-Pottier decomposition
|
||||
@Function: Cloude-Pottier eigenvector/eigenvalue based decomposition of a 3x3 coherency matrix [T3]
|
||||
(Averaging using a sliding window)
|
||||
V1.0.1: (1) selectable decomposition features; (2) bin-to-tif conversion
|
||||
@Contact:
|
||||
@Author:SHJ
|
||||
@Date:2021/9/24 9:06
|
||||
@Version:1.0.1
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger("mylog")
|
||||
|
||||
|
||||
class PspCloudePottierDecomposition:
|
||||
"""
|
||||
Invokes the PolSARpro 4.2.0 Cloude-Pottier polarimetric decomposition (h_a_alpha_decomposition_T3.exe)
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
exeDir,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
exeDecomposeName='h_a_alpha_decomposition_T3.exe'):
|
||||
"""
|
||||
:param exeDir: directory containing the exe
|
||||
:param inT3Dir: T3 matrix directory
|
||||
:param outDir: output directory
|
||||
"""
|
||||
self.__exeName = exeDecomposeName
|
||||
self.__exeDir = exeDir
|
||||
self.__inT3Dir = inT3Dir
|
||||
self.__outDir = outDir
|
||||
self.__DecompostFlag = False
|
||||
pass
|
||||
|
||||
def api_h_a_alpha_decomposition_T3(
|
||||
self,
|
||||
rectX,
|
||||
rectY,
|
||||
rectWidth,
|
||||
rectHeight,
|
||||
Nwin=1):
|
||||
"""
|
||||
:param rectX: x offset of the valid region
|
||||
:param rectY: y offset of the valid region
|
||||
:param rectWidth: width of the valid region
|
||||
:param rectHeight: height of the valid region
|
||||
:param Nwin :Size of the (Nwin, Nwin) sliding window used to compute local estimates. (int)
|
||||
"""
|
||||
if self.__DecompostFlag:
|
||||
return True
|
||||
if len(self.__exeDir) == 0:
|
||||
if not os.path.exists(self.__exeName):
|
||||
logger.error(self.__exeName + ' not exists.')
|
||||
return False
|
||||
exePath = self.__exeName
|
||||
else:
|
||||
if not os.path.exists(self.__exeDir + '\\' + self.__exeName):
|
||||
logger.error(self.__exeName + ' not exists.')
|
||||
return False
|
||||
exePath = self.__exeDir + '\\' + self.__exeName
|
||||
|
||||
if not self._checkT3Matrix(self.__inT3Dir):
|
||||
logger.error('T3 Matrix check failed.')
|
||||
return False
|
||||
if not os.path.exists(self.__outDir):
|
||||
os.makedirs(self.__outDir)
|
||||
|
||||
alpbetdelgam = 1
|
||||
Lambda = 1
|
||||
alpha = 1
|
||||
entropy = 1
|
||||
anisotropy = 1
|
||||
|
||||
CombHA = 1
|
||||
CombH1mA = 1
|
||||
Comb1mHA = 1
|
||||
Comb1mH1mA = 1
|
||||
|
||||
Off_lig = rectX
|
||||
Off_col = rectY
|
||||
Sub_Nlig = rectWidth
|
||||
Sub_Ncol = rectHeight
|
||||
|
||||
para_list = [
|
||||
exePath,
|
||||
self.__inT3Dir,
|
||||
self.__outDir,
|
||||
Nwin,
|
||||
Off_lig,
|
||||
Off_col,
|
||||
Sub_Nlig,
|
||||
Sub_Ncol,
|
||||
alpbetdelgam,
|
||||
Lambda,
|
||||
alpha,
|
||||
entropy,
|
||||
anisotropy,
|
||||
CombHA,
|
||||
CombH1mA,
|
||||
Comb1mHA,
|
||||
Comb1mH1mA]
|
||||
cmd = " ".join(str(i) for i in para_list)
|
||||
config_path = os.path.join(self.__inT3Dir, 'config.txt')
|
||||
shutil.copyfile(config_path, os.path.join(self.__outDir, 'config.txt'))
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
self.__DecompostFlag = True
|
||||
return True
|
||||
@staticmethod
|
||||
def _checkT3Matrix(T3Dir):
|
||||
# check that the T3 matrix files exist
|
||||
if not os.path.exists(T3Dir):
|
||||
return False
|
||||
file_name_in_out = ['T11.bin', 'T12_real.bin', 'T12_imag.bin',
|
||||
'T13_real.bin', 'T13_imag.bin', 'T22.bin',
|
||||
'T23_real.bin', 'T23_imag.bin', 'T33.bin']
|
||||
for item in file_name_in_out:
|
||||
if not os.path.exists(T3Dir + "\\" + item):
|
||||
return False
|
||||
return True
|
|
@ -1,109 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project:__init__.py
|
||||
@File:pspFreemanDecomposition.py
|
||||
@Function:
|
||||
@Contact:
|
||||
@Author:LVY
|
||||
@Date:2021/10/12 18:45
|
||||
@Version:1.0.0
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import logging
|
||||
logger = logging.getLogger("mylog")
|
||||
|
||||
|
||||
class PspFreemanDecomposition:
|
||||
"""
|
||||
Freeman decomposition
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
exeDir,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
exeDecomposeName='freeman_decomposition_T3.exe'):
|
||||
"""
|
||||
:param exeDir: directory containing the exe
|
||||
:param inT3Dir: T3 matrix directory
|
||||
:param outDir: output directory
|
||||
"""
|
||||
self.__exeName = exeDecomposeName
|
||||
self.__exeDir = exeDir
|
||||
self.__inT3Dir = inT3Dir
|
||||
self.__outDir = outDir
|
||||
self.__DecompostFlag = False
|
||||
pass
|
||||
|
||||
def api_freeman_decomposition_T3(
|
||||
self,
|
||||
rectX,
|
||||
rectY,
|
||||
rectWidth,
|
||||
rectHeight,
|
||||
Nwin=1):
|
||||
"""
|
||||
:param rectX: x offset of the valid region
|
||||
:param rectY: y offset of the valid region
|
||||
:param rectWidth: width of the valid region
|
||||
:param rectHeight: height of the valid region
|
||||
:param Nwin :Size of the (Nwin, Nwin) sliding window used to compute local estimates. (int)
|
||||
"""
|
||||
if self.__DecompostFlag:
|
||||
return True
|
||||
if len(self.__exeDir) == 0:
|
||||
if not os.path.exists(self.__exeName):
|
||||
logger.error(self.__exeName + ' not exists.')
|
||||
return False
|
||||
exePath = self.__exeName
|
||||
else:
|
||||
if not os.path.exists(self.__exeDir + '\\' + self.__exeName):
|
||||
logger.error(self.__exeName + ' not exists.')
|
||||
return False
|
||||
exePath = self.__exeDir + '\\' + self.__exeName
|
||||
|
||||
if not self._checkT3Matrix(self.__inT3Dir):
|
||||
logger.error('T3 Matrix check failed.')
|
||||
return False
|
||||
if not os.path.exists(self.__outDir):
|
||||
os.makedirs(self.__outDir)
|
||||
|
||||
Off_lig = rectX
|
||||
Off_col = rectY
|
||||
Sub_Nlig = rectWidth
|
||||
Sub_Ncol = rectHeight
|
||||
|
||||
para_list = [
|
||||
exePath,
|
||||
self.__inT3Dir,
|
||||
self.__outDir,
|
||||
Nwin,
|
||||
Off_lig,
|
||||
Off_col,
|
||||
Sub_Nlig,
|
||||
Sub_Ncol]
|
||||
cmd = " ".join(str(i) for i in para_list)
|
||||
config_path = os.path.join(self.__inT3Dir, 'config.txt')
|
||||
shutil.copyfile(config_path, os.path.join(self.__outDir, 'config.txt'))
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
self.__DecompostFlag = True
|
||||
return True
|
||||
@staticmethod
|
||||
def _checkT3Matrix(T3Dir):
|
||||
# check that the T3 matrix files exist
|
||||
if not os.path.exists(T3Dir):
|
||||
return False
|
||||
file_name_in_out = ['T11.bin', 'T12_real.bin', 'T12_imag.bin',
|
||||
'T13_real.bin', 'T13_imag.bin', 'T22.bin',
|
||||
'T23_real.bin', 'T23_imag.bin', 'T33.bin']
|
||||
for item in file_name_in_out:
|
||||
if not os.path.exists(T3Dir + "\\" + item):
|
||||
return False
|
||||
return True
|
|
@ -1,435 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project:__init__.py
|
||||
@File:pspHAAlphaDecomposition.py
|
||||
@Function: Cloude-Pottier eigenvector/eigenvalue based decomposition of a 3x3 coherency matrix [T3]
|
||||
(Averaging using a sliding window)
|
||||
V1.0.1: (1) selectable decomposition features; (2) bin-to-tif conversion
|
||||
@Contact:
|
||||
@Author:SHJ
|
||||
@Date:2021/9/24 9:06
|
||||
@Version:1.0.1
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import struct
|
||||
import numpy as np
|
||||
import glob
|
||||
from PIL import Image
|
||||
import logging
|
||||
logger = logging.getLogger("mylog")
|
||||
import multiprocessing
|
||||
|
||||
class PspHAAlphaDecomposition:
|
||||
"""
|
||||
Invokes the PolSARpro 4.2.0 Cloude-Pottier polarimetric decomposition
|
||||
"""
|
||||
def __init__(self, normalization=False):
|
||||
self.__normalization = normalization  # whether to normalize
|
||||
self.__res_h_a_alpha_decomposition_T3 = {}
|
||||
self.__res_h_a_alpha_eigenvalue_set_T3 = {}
|
||||
self.__res_h_a_alpha_eigenvector_set_T3 = {}
|
||||
pass
|
||||
|
||||
def api_creat_h_a_alpha_features_single_process(self, h_a_alpha_out_dir,
|
||||
h_a_alpha_decomposition_T3_path, h_a_alpha_eigenvalue_set_T3_path,
|
||||
h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir,is_trans_to_tif=True, is_read_to_dic=False):
|
||||
"""
|
||||
Perform the Cloude-Pottier decomposition (h_a_alpha_decomposition, h_a_alpha_eigenvalue_set and h_a_alpha_eigenvector_set) on a PolSARpro-format T3 matrix.
|
||||
:param h_a_alpha_out_dir: output directory for the h_a_alpha binary data
|
||||
:param h_a_alpha_decomposition_T3_path: path of h_a_alpha_decomposition_T3.exe
|
||||
:param h_a_alpha_eigenvalue_set_T3_path: path of h_a_alpha_eigenvalue_set_T3.exe
|
||||
:param h_a_alpha_eigenvector_set_T3_path: path of h_a_alpha_eigenvector_set_T3.exe
|
||||
:param polsarpro_in_dir: input PolSARpro-format T3 matrix directory (.bin plus config.txt)
|
||||
"""
|
||||
h_a_alpha_features ={}
|
||||
h_a_alpha_features.update(self.api_h_a_alpha_decomposition_T3(h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, is_trans_to_tif,is_read_to_dic, *(1, 1, 1, 1, 1, 1, 1, 1, 1)))
|
||||
logger.info("run h_a_alpha_decomposition_T3 success!")
|
||||
logger.info('progress bar: 40%')
|
||||
h_a_alpha_features.update(self.api_h_a_alpha_eigenvalue_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, is_trans_to_tif, is_read_to_dic, *(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1)))
|
||||
logger.info("run h_a_alpha_eigenvalue_set_T3 success!")
|
||||
logger.info('progress bar: 60%')
|
||||
h_a_alpha_features.update(self.api_h_a_alpha_eigenvector_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, is_trans_to_tif,is_read_to_dic, *(1, 1, 1, 1, 1)))
|
||||
logger.info("run h_a_alpha_eigenvector_set_T3 success!")
|
||||
logger.info('progress bar: 80%')
|
||||
if is_trans_to_tif:
|
||||
self.api_trans_T3_to_tif(h_a_alpha_out_dir, polsarpro_in_dir)
|
||||
if is_read_to_dic:
|
||||
h_a_alpha_features.update(self.api_read_T3_matrix(polsarpro_in_dir))
|
||||
return h_a_alpha_features
|
||||
|
||||
def api_creat_h_a_alpha_features(self, h_a_alpha_out_dir,
|
||||
h_a_alpha_decomposition_T3_path, h_a_alpha_eigenvalue_set_T3_path,
|
||||
h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir,is_trans_to_tif=True, is_read_to_dic=False):
|
||||
"""
|
||||
Perform the Cloude-Pottier decomposition (h_a_alpha_decomposition, h_a_alpha_eigenvalue_set and h_a_alpha_eigenvector_set) on a PolSARpro-format T3 matrix.
|
||||
:param h_a_alpha_out_dir: output directory for the h_a_alpha binary data
|
||||
:param h_a_alpha_decomposition_T3_path: path of h_a_alpha_decomposition_T3.exe
|
||||
:param h_a_alpha_eigenvalue_set_T3_path: path of h_a_alpha_eigenvalue_set_T3.exe
|
||||
:param h_a_alpha_eigenvector_set_T3_path: path of h_a_alpha_eigenvector_set_T3.exe
|
||||
:param polsarpro_in_dir: input PolSARpro-format T3 matrix directory (.bin plus config.txt)
|
||||
"""
|
||||
|
||||
pool = multiprocessing.Pool(processes=3)
|
||||
pl = []
|
||||
|
||||
logger.info("run h_a_alpha_decomposition_T3!")
|
||||
pl.append(pool.apply_async(self.api_h_a_alpha_decomposition_T3, (h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, is_trans_to_tif, is_read_to_dic, *(1, 1, 1, 1, 1, 1, 1, 1, 1))))
|
||||
logger.info("run h_a_alpha_eigenvalue_set_T3!")
|
||||
pl.append(pool.apply_async(self.api_h_a_alpha_eigenvalue_set_T3, (h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, is_trans_to_tif, is_read_to_dic, *(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1))))
|
||||
logger.info("run h_a_alpha_eigenvector_set_T3!")
|
||||
pl.append(pool.apply_async(self.api_h_a_alpha_eigenvector_set_T3, (h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, is_trans_to_tif,is_read_to_dic, *(1, 1, 1, 1, 1))))
|
||||
|
||||
pool.close()
|
||||
pool.join()
|
||||
logger.info(pl)
|
||||
logger.info('progress bar: 60%')
|
||||
|
||||
h_a_alpha_features = {}
|
||||
h_a_alpha_features.update(self.__res_h_a_alpha_decomposition_T3)
|
||||
logger.info("run h_a_alpha_decomposition_T3 success!")
|
||||
h_a_alpha_features.update(self.__res_h_a_alpha_eigenvalue_set_T3)
|
||||
logger.info("run h_a_alpha_eigenvalue_set_T3 success!")
|
||||
h_a_alpha_features.update(self.__res_h_a_alpha_eigenvector_set_T3)
|
||||
logger.info("run h_a_alpha_eigenvector_set_T3 success!")
|
||||
if is_trans_to_tif:
|
||||
self.api_trans_T3_to_tif(h_a_alpha_out_dir, polsarpro_in_dir)
|
||||
if is_read_to_dic:
|
||||
h_a_alpha_features.update(self.api_read_T3_matrix(polsarpro_in_dir))
|
||||
return h_a_alpha_features
|
||||
|
||||
def api_h_a_alpha_decomposition_T3(self, h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, is_trans_to_tif=True, is_read_to_dic=False, *args):
|
||||
"""
|
||||
Perform the Cloude-Pottier (H-A-Alpha) decomposition on a PolSARpro-format T3 matrix.
|
||||
:param h_a_alpha_out_dir: output directory for the h_a_alpha binary data
|
||||
:param h_a_alpha_decomposition_T3_path: path of h_a_alpha_decomposition_T3.exe
|
||||
:param polsarpro_in_dir: input PolSARpro-format T3 matrix directory (.bin plus config.txt)
|
||||
:param is_trans_to_tif: whether to convert the decomposition features to tif
|
||||
:param is_read_to_dic: whether to return the decomposition features as a dict
|
||||
:param *args: 9 optional decomposition features (alpbetdelgam, Lambda, alpha, entropy, anisotropy,
|
||||
CombHA, CombH1mA, Comb1mHA, Comb1mH1mA); 0 = skip, 1 = output
|
||||
:return: dict of decomposition features
|
||||
"""
|
||||
if not os.path.exists(h_a_alpha_out_dir):
|
||||
os.makedirs(h_a_alpha_out_dir)
|
||||
self.__h_a_alpha_decomposition_T3(h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, *args)
|
||||
name_list = ['entropy', 'anisotropy', 'alpha', 'beta', 'delta', 'gamma', 'lambda',
|
||||
'combination_1mH1mA', 'combination_1mHA', 'combination_H1mA', 'combination_HA']
|
||||
if is_trans_to_tif:
|
||||
self.__write_haalpha_to_tif(h_a_alpha_out_dir, h_a_alpha_out_dir, name_list)
|
||||
|
||||
if is_read_to_dic:
|
||||
self.__res_h_a_alpha_decomposition_T3 = self.__read_haalpha(h_a_alpha_out_dir, name_list)
|
||||
return self.__res_h_a_alpha_decomposition_T3
|
||||
else:
|
||||
return {}
|
||||
|
||||
def api_h_a_alpha_eigenvalue_set_T3(self, h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, is_trans_to_tif=True, is_read_to_dic=False, *args):
|
||||
|
||||
"""
|
||||
Cloude-Pottier eigenvalue based decomposition of a coherency matrix
|
||||
:param h_a_alpha_out_dir: output directory for the Cloude-Pottier eigenvalue data
|
||||
:param h_a_alpha_eigenvalue_set_T3_path: path of h_a_alpha_eigenvalue_set_T3.exe
|
||||
:param polsarpro_in_dir: input PolSARpro-format T3 matrix directory (.bin plus config.txt)
|
||||
:param is_trans_to_tif: whether to convert the decomposition features to tif
|
||||
:param is_read_to_dic: whether to return the decomposition features as a dict
|
||||
:param *args: optional output flags; 0 = skip, 1 = output
|
||||
:return: dict of decomposition features
|
||||
"""
|
||||
if not os.path.exists(h_a_alpha_out_dir):
|
||||
os.makedirs(h_a_alpha_out_dir)
|
||||
self.__h_a_alpha_eigenvalue_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, *args)
|
||||
name_list = ['anisotropy', 'anisotropy_lueneburg', 'anisotropy12', 'asymetry', 'derd', 'derd_norm', 'entropy_shannon',
|
||||
'entropy_shannon_I', 'entropy_shannon_I_norm', 'entropy_shannon_norm', 'entropy_shannon_P',
|
||||
'entropy_shannon_P_norm', 'l1', 'l2', 'l3', 'p1', 'p2', 'p3', 'pedestal', 'polarisation_fraction',
|
||||
'rvi', 'serd', 'serd_norm']
|
||||
if is_trans_to_tif:
|
||||
self.__write_haalpha_to_tif(h_a_alpha_out_dir, h_a_alpha_out_dir, name_list)
|
||||
|
||||
if is_read_to_dic:
|
||||
self.__res_h_a_alpha_eigenvalue_set_T3 = self.__read_haalpha(h_a_alpha_out_dir, name_list)
|
||||
return self.__res_h_a_alpha_eigenvalue_set_T3
|
||||
else:
|
||||
return {}
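# Usage sketch (hypothetical paths, mirroring the decomposition call above but
# with the 11 eigenvalue-set flags, here all enabled):
#   feats = haa.api_h_a_alpha_eigenvalue_set_T3(
#       r'D:\out', r'D:\psp\h_a_alpha_eigenvalue_set_T3.exe', r'D:\T3_in',
#       True, True, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1)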
|
||||
|
||||
def api_h_a_alpha_eigenvector_set_T3(self, h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, is_trans_to_tif=True, is_read_to_dic=False, *args):
|
||||
|
||||
"""
|
||||
Cloude-Pottier eigenvector based decomposition of a coherency matrix
|
||||
:param h_a_alpha_out_dir : directory for the Cloude-Pottier eigenvector-set binary outputs
|
||||
:param h_a_alpha_eigenvector_set_T3_path: path to h_a_alpha_eigenvector_set_T3.exe
|
||||
:param polsarpro_in_dir: input PolSARPro-format T3 matrix directory (.bin files plus config.txt)
|
||||
:param is_trans_to_tif: whether to convert the decomposition features to tif
|
||||
:param is_read_to_dic: whether to return the decomposition features as a dict
|
||||
:param *args: 5 optional feature flags (alpha123, beta123, delta123, gamma123, alpbetdelgam);
|
||||
0 = do not output, 1 = output
|
||||
:return : dict of the decomposition features
|
||||
"""
|
||||
if not os.path.exists(h_a_alpha_out_dir):
|
||||
os.makedirs(h_a_alpha_out_dir)
|
||||
self.__h_a_alpha_eigenvector_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, *args)
|
||||
name_list = ['alpha', 'alpha1', 'alpha2', 'alpha3',
|
||||
'beta', 'beta1', 'beta2', 'beta3',
|
||||
'delta', 'delta1', 'delta2', 'delta3',
|
||||
'gamma', 'gamma1', 'gamma2', 'gamma3']
|
||||
if is_trans_to_tif:
|
||||
self.__write_haalpha_to_tif(h_a_alpha_out_dir, h_a_alpha_out_dir, name_list)
|
||||
|
||||
if is_read_to_dic:
|
||||
self.__res_h_a_alpha_eigenvector_set_T3 = self.__read_haalpha(h_a_alpha_out_dir, name_list)
|
||||
return self.__res_h_a_alpha_eigenvector_set_T3
|
||||
else:
|
||||
return {}
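# Usage sketch (hypothetical paths; the 5 eigenvector-set flags, all enabled):
#   feats = haa.api_h_a_alpha_eigenvector_set_T3(
#       r'D:\out', r'D:\psp\h_a_alpha_eigenvector_set_T3.exe', r'D:\T3_in',
#       True, True, 1, 1, 1, 1, 1)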
|
||||
|
||||
def api_read_T3_matrix(self,polsarpro_T3_dir):
|
||||
"""
|
||||
Read a T3 matrix and return it as a dict of arrays
|
||||
:param polsarpro_T3_dir: input PolSARPro-format T3 matrix directory (.bin files plus config.txt)
|
||||
:return : dict holding the T3 matrix elements
|
||||
"""
|
||||
name_list = ['T11', 'T12_imag', 'T12_real',
|
||||
'T22', 'T13_imag', 'T13_real',
|
||||
'T33', 'T23_imag', 'T23_real']
|
||||
return self.__read_haalpha(polsarpro_T3_dir, name_list)
|
||||
|
||||
def api_trans_T3_to_tif(self, out_tif_dir, polsarpro_T3_dir):
|
||||
"""
|
||||
Convert a T3 matrix from bin format to tif format
|
||||
:param out_tif_dir: output directory for the tif files
|
||||
:param polsarpro_T3_dir: input PolSARPro-format T3 matrix directory (.bin files plus config.txt)
|
||||
"""
|
||||
name_list = ['T11', 'T12_imag', 'T12_real',
|
||||
'T22', 'T13_imag', 'T13_real',
|
||||
'T33', 'T23_imag', 'T23_real']
|
||||
self.__write_haalpha_to_tif(out_tif_dir, polsarpro_T3_dir, name_list)
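# Usage sketch (hypothetical paths): dump the T3 elements to tif, then read
# them back as a dict of numpy arrays keyed by band name, e.g. t3['T11']:
#   haa.api_trans_T3_to_tif(r'D:\out', r'D:\T3_in')
#   t3 = haa.api_read_T3_matrix(r'D:\T3_in')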
|
||||
|
||||
@staticmethod
|
||||
def __h_a_alpha_decomposition_T3(h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, *args):
|
||||
"""
|
||||
Run the Cloude-Pottier decomposition (H-A-Alpha decomposition) on a PolSARPro-format T3 matrix
|
||||
:param h_a_alpha_out_dir : directory for the h_a_alpha binary outputs
|
||||
:param h_a_alpha_decomposition_T3_path: path to h_a_alpha_decomposition_T3.exe
|
||||
:param polsarpro_in_dir: input PolSARPro-format T3 matrix directory (.bin files plus config.txt)
|
||||
:param *args: 9 optional output flags (alpbetdelgam, Lambda, alpha, entropy, anisotropy,
|
||||
CombHA, CombH1mA, Comb1mHA, Comb1mH1mA); 0 = do not output, 1 = output
|
||||
"""
|
||||
if not os.path.exists(h_a_alpha_decomposition_T3_path):
|
||||
raise Exception(h_a_alpha_decomposition_T3_path + ' does not exist!')
|
||||
|
||||
NwinFilter = 1
|
||||
offsetRow = 0
|
||||
offsetCol = 0
|
||||
|
||||
config_path = os.path.join(polsarpro_in_dir, 'config.txt')
|
||||
config = open(config_path, 'r').read().split('\n', -1)
|
||||
|
||||
numRow = int(config[1])
|
||||
numCol = int(config[4])
|
||||
|
||||
alpbetdelgam = int(args[0])
|
||||
Lambda = int(args[1])
|
||||
alpha = int(args[2])
|
||||
entropy = int(args[3])
|
||||
anisotropy = int(args[4])
|
||||
|
||||
CombHA = int(args[5])
|
||||
CombH1mA = int(args[6])
|
||||
Comb1mHA = int(args[7])
|
||||
Comb1mH1mA = int(args[8])
|
||||
|
||||
para_list = [h_a_alpha_decomposition_T3_path, polsarpro_in_dir, h_a_alpha_out_dir,
|
||||
str(NwinFilter), str(offsetRow), str(offsetCol), str(numRow), str(numCol),
|
||||
str(alpbetdelgam), str(Lambda), str(alpha), str(entropy), str(anisotropy),
|
||||
str(CombHA), str(CombH1mA), str(Comb1mHA), str(Comb1mH1mA)]
|
||||
cmd = ' '.join(para_list)
|
||||
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1 or result_tuple[1].find('Could not open') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
shutil.copyfile(config_path, os.path.join(h_a_alpha_out_dir, 'config.txt'))
|
||||
|
||||
@staticmethod
|
||||
def __h_a_alpha_eigenvalue_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, *args):
|
||||
|
||||
"""
|
||||
Cloude-Pottier eigenvalue based decomposition of a coherency matrix
|
||||
:param h_a_alpha_out_dir : directory for the Cloude-Pottier eigenvalue-set binary outputs
|
||||
:param h_a_alpha_eigenvalue_set_T3_path: path to h_a_alpha_eigenvalue_set_T3.exe
|
||||
:param polsarpro_in_dir: input PolSARPro-format T3 matrix directory (.bin files plus config.txt)
|
||||
:param *args: 11 optional output flags (eigen123, proba123, anisotropy, anisotropy12, asymetry,
|
||||
polarisation_fraction, erd, rvi, pedestal, shannon, lueneburg); 0 = do not output, 1 = output
|
||||
"""
|
||||
if not os.path.exists(h_a_alpha_eigenvalue_set_T3_path):
|
||||
raise Exception(h_a_alpha_eigenvalue_set_T3_path + ' does not exist!')
|
||||
NwinFilter = 1
|
||||
offsetRow = 0
|
||||
offsetCol = 0
|
||||
|
||||
config_path = os.path.join(polsarpro_in_dir, 'config.txt')
|
||||
config = open(config_path, 'r').read().split('\n', -1)
|
||||
|
||||
numRow = int(config[1])
|
||||
numCol = int(config[4])
|
||||
|
||||
eigen123 = int(args[0])
|
||||
proba123 = int(args[1])
|
||||
anisotropy = int(args[2])
|
||||
anisotropy12 = int(args[3])
|
||||
asymetry = int(args[4])
|
||||
polarisation_fraction = int(args[5])
|
||||
erd = int(args[6])
|
||||
rvi = int(args[7])
|
||||
pedestal = int(args[8])
|
||||
shannon = int(args[9])
|
||||
lueneburg = int(args[10])
|
||||
|
||||
para_list = [h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, h_a_alpha_out_dir,
|
||||
str(NwinFilter), str(offsetRow), str(offsetCol), str(numRow), str(numCol),
|
||||
str(eigen123), str(proba123), str(anisotropy), str(anisotropy12), str(asymetry),
|
||||
str(polarisation_fraction), str(erd), str(rvi), str(pedestal),
|
||||
str(shannon), str(lueneburg)]
|
||||
cmd = ' '.join(para_list)
|
||||
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1 or result_tuple[1].find('Could not open') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
shutil.copyfile(config_path, os.path.join(h_a_alpha_out_dir, 'config.txt'))
|
||||
|
||||
@staticmethod
|
||||
def __h_a_alpha_eigenvector_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, *args):
|
||||
|
||||
"""
|
||||
Cloude-Pottier eigenvector based decomposition of a coherency matrix
|
||||
:param h_a_alpha_out_dir : directory for the Cloude-Pottier eigenvector-set binary outputs
|
||||
:param h_a_alpha_eigenvector_set_T3_path: path to h_a_alpha_eigenvector_set_T3.exe
|
||||
:param polsarpro_in_dir: input PolSARPro-format T3 matrix directory (.bin files plus config.txt)
|
||||
:param *args: 5 optional output flags (alpha123, beta123, delta123, gamma123, alpbetdelgam); 0 = do not output, 1 = output
|
||||
"""
|
||||
if not os.path.exists(h_a_alpha_eigenvector_set_T3_path):
|
||||
raise Exception(h_a_alpha_eigenvector_set_T3_path + ' does not exist!')
|
||||
NwinFilter = 1
|
||||
offsetRow = 0
|
||||
offsetCol = 0
|
||||
|
||||
config_path = os.path.join(polsarpro_in_dir, 'config.txt')
|
||||
config = open(config_path, 'r').read().split('\n', -1)
|
||||
|
||||
numRow = int(config[1])
|
||||
numCol = int(config[4])
|
||||
|
||||
alpha123 = int(args[0])
|
||||
beta123 = int(args[1])
|
||||
delta123 = int(args[2])
|
||||
gamma123 = int(args[3])
|
||||
alpbetdelgam = int(args[4])
|
||||
|
||||
para_list = [h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, h_a_alpha_out_dir,
|
||||
str(NwinFilter), str(offsetRow), str(offsetCol), str(numRow), str(numCol),
|
||||
str(alpha123), str(beta123), str(delta123), str(gamma123), str(alpbetdelgam)]
|
||||
cmd = ' '.join(para_list)
|
||||
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1 or result_tuple[1].find('Could not open') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
shutil.copyfile(config_path, os.path.join(h_a_alpha_out_dir, 'config.txt'))
|
||||
|
||||
def __read_haalpha(self, h_a_alpha_dir, name_list):
|
||||
"""
|
||||
Read the H-A-Alpha decomposition binaries into a dict of matrices
|
||||
:param h_a_alpha_dir : directory of the h_a_alpha binaries (.bin files plus config.txt)
|
||||
:name_list : feature names to collect, e.g. ['entropy', 'anisotropy', 'alpha', 'beta', 'delta', 'gamma', 'lambda',
|
||||
'combination_1mH1mA', 'combination_1mHA', 'combination_H1mA', 'combination_HA']
|
||||
:return : dict mapping feature name to matrix
|
||||
"""
|
||||
dir = os.path.join(h_a_alpha_dir, '*.bin')
|
||||
bin_paths = list(glob.glob(dir))
|
||||
haalpha_dic ={}
|
||||
for name in name_list:
|
||||
path = os.path.join(h_a_alpha_dir, name + '.bin')
|
||||
if path in bin_paths:
|
||||
img = self.__read_bin_to_img(path)
|
||||
haalpha_dic.update({name: img})
|
||||
return haalpha_dic
|
||||
|
||||
def standardization(self, data, num=1):
|
||||
# rescale the matrix linearly to [0, num]
|
||||
data[np.isnan(data)] = np.nanmin(data)  # fill NaNs with the minimum valid value (np.min would itself return NaN here)
|
||||
_range = np.max(data) - np.min(data)
|
||||
return (data - np.min(data)) / _range * num
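# Worked example of the rescaling: data = [2, 4, 6] with num=1 maps to
# (data - 2) / (6 - 2) * 1 = [0.0, 0.5, 1.0].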
|
||||
|
||||
def __write_haalpha_to_tif(self, out_tif_dir, h_a_alpha_dir, name_list):
|
||||
"""
|
||||
Convert the H-A-Alpha decomposition binaries to tif files
|
||||
:param out_tif_dir : output directory for the tif files
|
||||
:param h_a_alpha_dir : directory of the h_a_alpha binaries (.bin files plus config.txt)
|
||||
:name_list : feature names to convert, e.g. ['entropy', 'anisotropy', 'alpha', 'beta', 'delta', 'gamma', 'lambda',
|
||||
'combination_1mH1mA', 'combination_1mHA', 'combination_H1mA', 'combination_HA']
|
||||
|
||||
"""
|
||||
dir = os.path.join(h_a_alpha_dir, '*.bin')
|
||||
bin_paths = list(glob.glob(dir))
|
||||
|
||||
for name in name_list:
|
||||
in_path = os.path.join(h_a_alpha_dir, name + '.bin')
|
||||
out_path = os.path.join(out_tif_dir, name + '.tif')
|
||||
if in_path in bin_paths:
|
||||
img_array = self.__read_bin_to_img(in_path)
|
||||
if self.__normalization is True:
|
||||
img_array = self.standardization(img_array, num=1)
|
||||
out_image = Image.fromarray(img_array)
|
||||
out_image.save(out_path)
|
||||
|
||||
@staticmethod
|
||||
def __read_bin_to_img(bin_path):
|
||||
"""
|
||||
Read bin-format binary data into a matrix
|
||||
:param bin_path : path to the .bin file (config.txt must sit in the same directory)
|
||||
:return : the image matrix
|
||||
"""
|
||||
(bin_dir, bin_name) = os.path.split(bin_path)
|
||||
config_path = os.path.join(bin_dir, 'config.txt')
|
||||
config = open(config_path, 'r').read().split('\n', -1)
|
||||
rows = int(config[1])
|
||||
cols = int(config[4])
|
||||
|
||||
bin_file = open(bin_path, 'rb')  # open the binary file
|
||||
size = os.path.getsize(bin_path)  # file size in bytes
|
||||
if size < rows*cols*4:
|
||||
raise Exception('bin size less than rows*cols*4! size:', size, 'byte, rows:', rows, 'cols:', cols)
|
||||
|
||||
img = np.zeros([rows, cols], dtype=np.float32)
|
||||
for row in range(rows):
|
||||
data = bin_file.read(4 * cols)  # read one row of raw bytes per iteration
|
||||
row_data = struct.unpack('f' * cols, data)  # unpack into one row of floats
|
||||
img[row, :] = row_data
|
||||
bin_file.close()
|
||||
return img
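# Note: on the same machine the row loop above is equivalent to one vectorized
# read; a minimal sketch, assuming the float32 layout described by config.txt:
#   img = np.fromfile(bin_path, dtype=np.float32, count=rows * cols).reshape(rows, cols)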
|
||||
|
||||
|
||||
# if __name__ == '__main__':
|
||||
# h_a_alpha_decomposition_T3_path = 'D:\\PolSARpro_v4.2.0\\Soft\data_process_sngl\\h_a_alpha_decomposition_T3.exe'
|
||||
# h_a_alpha_eigenvalue_set_T3_path = 'D:\\PolSARpro_v4.2.0\\Soft\data_process_sngl\\h_a_alpha_eigenvalue_set_T3.exe'
|
||||
# h_a_alpha_eigenvector_set_T3_path = 'D:\\PolSARpro_v4.2.0\\Soft\data_process_sngl\\h_a_alpha_eigenvector_set_T3.exe'
|
||||
# polsarpro_in_dir = 'D:\\PolSARpro_v4.2.0\\in'
|
||||
# haalpha_out_dir = 'D:\\PolSARpro_v4.2.0\\out'
|
||||
# h_a_alpha_eigenvalue_set_T3_out = 'D:\\PolSARpro_v4.2.0\\out\\h_a_alpha_eigenvalue_set_T3'
|
||||
# h_a_alpha_eigenvector_set_T3_out = 'D:\\PolSARpro_v4.2.0\\out\\h_a_alpha_eigenvector_set_T3'
|
||||
#
|
||||
# haa = PspHAAlphaDecomposition()
|
||||
# h_a_alpha_features = haa.api_creat_h_a_alpha_features(haalpha_out_dir, h_a_alpha_decomposition_T3_path, h_a_alpha_eigenvalue_set_T3_path, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir)
|
||||
|
||||
|
||||
# haa = PspHAAlphaDecomposition(normalization=True)
|
||||
# psp_path = r"I:\MicroWorkspace\product\C-SAR\SoilSalinity\GF3B_MYC_QPSI_003581_E120.6_N31.3_20220729_L1A_AHV_L10000073024_RPCpsp_t3"
|
||||
# t3_path = r"I:\MicroWorkspace\product\C-SAR\SoilSalinity\t3"
|
||||
# exe_dir = r"I:\microproduct\soilSalinity/"
|
||||
# haa.api_creat_h_a_alpha_features(h_a_alpha_out_dir=t3_path,
|
||||
# h_a_alpha_decomposition_T3_path= exe_dir + 'h_a_alpha_decomposition_T3.exe',
|
||||
# h_a_alpha_eigenvalue_set_T3_path= exe_dir + 'h_a_alpha_eigenvalue_set_T3.exe',
|
||||
# h_a_alpha_eigenvector_set_T3_path=exe_dir +'h_a_alpha_eigenvector_set_T3.exe',
|
||||
# polsarpro_in_dir=psp_path)
|
||||
|
||||
# print('done')
|
|
@ -1,170 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project:__init__.py
|
||||
@File:pspLeeRefinedFilterC2.py
|
||||
@Function:
|
||||
@Contact:
|
||||
@Author:SHJ
|
||||
@Date:2021/11/5
|
||||
@Version:1.0.0
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import glob
|
||||
import numpy as np
|
||||
import struct
|
||||
from PIL import Image
|
||||
|
||||
logger = logging.getLogger("mylog")
|
||||
|
||||
|
||||
class LeeRefinedFilterC2:
|
||||
"""
|
||||
Run refined Lee filtering via PolSARPro 4.2.0's lee_refined_filter_C2.exe
|
||||
"""
|
||||
|
||||
def __init__(self, exeFilterName='lee_refined_filter_C2.exe'):
|
||||
self.__exeName = exeFilterName
|
||||
pass
|
||||
|
||||
def api_lee_refined_filter_C2(
|
||||
self,
|
||||
exeDir,
|
||||
inC2Dir,
|
||||
outDir,
|
||||
off_row,
|
||||
off_col,
|
||||
Nrow,
|
||||
Ncol,
|
||||
Nwin=7,
|
||||
Nlook=1):
|
||||
"""
|
||||
:param exeDir: directory containing the exe (an empty string means the current directory)
|
||||
:param inC2Dir: C2 matrix directory
|
||||
:param outDir: output directory
|
||||
:param off_row: row offset (starting row)
|
||||
:param off_col: column offset (starting column)
|
||||
:param Nrow: ending row
|
||||
:param Ncol: ending column
|
||||
:param Nwin: filter window size, one of 3 5 7 9 11
|
||||
:param Nlook: number of looks, usually 1
|
||||
"""
|
||||
if len(exeDir) == 0:
|
||||
if not os.path.exists(self.__exeName):
|
||||
raise Exception(self.__exeName + ' not exists.')
|
||||
exePath = self.__exeName
|
||||
else:
|
||||
if not os.path.exists(exeDir + '\\' + self.__exeName):
|
||||
raise Exception(
|
||||
exeDir +
|
||||
'\\' +
|
||||
self.__exeName +
|
||||
' not exists.')
|
||||
exePath = exeDir + '\\' + self.__exeName
|
||||
|
||||
# if not self._checkT3Matrix(inT3Dir):
|
||||
# raise Exception('T3 Matrix check failed.')
|
||||
if not os.path.exists(outDir):
|
||||
os.makedirs(outDir)
|
||||
if (Nwin % 2) == 0 or Nwin < 0:  # fall back to the default window if Nwin is even or negative
|
||||
Nwin = 7
|
||||
|
||||
Off_lig = off_row
|
||||
Off_col = off_col
|
||||
Sub_Nlig = Nrow
|
||||
Sub_Ncol = Ncol
|
||||
|
||||
para_list = [
|
||||
exePath,
|
||||
inC2Dir,
|
||||
outDir,
|
||||
Nlook,
|
||||
Nwin,
|
||||
Off_lig,
|
||||
Off_col,
|
||||
Sub_Nlig,
|
||||
Sub_Ncol]
|
||||
cmd = ' '.join(str(i) for i in para_list)
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
config_path = os.path.join(inC2Dir, 'config.txt')
|
||||
if config_path != os.path.join(outDir, 'config.txt'):
|
||||
shutil.copyfile(config_path, os.path.join(outDir, 'config.txt'))
|
||||
@staticmethod
|
||||
def _checkC2Matrix(C2Dir):
|
||||
# verify the C2 matrix directory
|
||||
if not os.path.exists(C2Dir):
|
||||
return False
|
||||
file_name_in_out = ['C11.bin', 'C12_real.bin', 'C12_imag.bin', 'C22.bin','config.txt']
|
||||
for item in file_name_in_out:
|
||||
if not os.path.exists(C2Dir + "\\" + item):
|
||||
return False
|
||||
return True
|
||||
def write_bin_to_tif(self, out_tif_dir, bin_dir):
|
||||
"""
|
||||
Convert the bin-format binaries in a directory to tif files
|
||||
:param out_tif_dir : output directory for the tif files
|
||||
:param bin_dir : directory of the binaries (.bin files plus config.txt)
|
||||
:return out_tif_path: dict mapping band name to the generated tif path
|
||||
"""
|
||||
bin_paths = list(glob.glob(os.path.join(bin_dir, '*.bin')))
|
||||
out_tif_path = {}
|
||||
for in_path in bin_paths:
|
||||
name = os.path.split(in_path)[1].split('.')[0]
|
||||
out_path = os.path.join(out_tif_dir, name + '.tif')
|
||||
out_tif_path.update({name: out_path})
|
||||
if os.path.exists(os.path.split(out_path)[0]) is False:
|
||||
os.makedirs(os.path.split(out_path)[0])
|
||||
img_array = self.__read_bin_to_img(in_path)
|
||||
img_array[np.isnan(img_array)] = 0  # fill NaNs with 0
|
||||
# img_array = self.standardization(img_array)  # rescale data to [0,1]
|
||||
out_image = Image.fromarray(img_array)
|
||||
out_image.save(out_path)
|
||||
return out_tif_path
|
||||
@staticmethod
|
||||
|
||||
def __read_bin_to_img(bin_path):
|
||||
"""
|
||||
Read bin-format binary data into a matrix
|
||||
:param bin_path : path to the .bin file (config.txt must sit in the same directory)
|
||||
:return : the image matrix
|
||||
"""
|
||||
(bin_dir, bin_name) = os.path.split(bin_path)
|
||||
config_path = os.path.join(bin_dir, 'config.txt')
|
||||
config = open(config_path, 'r').read().split('\n', -1)
|
||||
rows = int(config[1])
|
||||
cols = int(config[4])
|
||||
|
||||
bin_file = open(bin_path, 'rb')  # open the binary file
|
||||
size = os.path.getsize(bin_path)  # file size in bytes
|
||||
if size < rows * cols * 4:
|
||||
raise Exception(
|
||||
'bin size less than rows*cols*4! size:',
|
||||
size,
|
||||
'byte, rows:',
|
||||
rows,
|
||||
'cols:',
|
||||
cols)
|
||||
|
||||
img = np.zeros([rows, cols], dtype=np.float32)
|
||||
for row in range(rows):
|
||||
data = bin_file.read(4 * cols)  # read one row of raw bytes per iteration
|
||||
row_data = struct.unpack('f' * cols, data)  # unpack into one row of floats
|
||||
img[row, :] = row_data
|
||||
bin_file.close()
|
||||
return img
|
||||
|
||||
if __name__ == '__main__':
|
||||
tp =LeeRefinedFilterC2()
|
||||
inC2Dir=r'E:\MicroWorkspace\LandCover\HHHV1'
|
||||
outDir =r'E:\MicroWorkspace\LandCover\HHHV1_f'
|
||||
off_row = 0
|
||||
off_col = 0
|
||||
Nrow = 666
|
||||
Ncol = 746
|
||||
tp.api_lee_refined_filter_C2( '',inC2Dir,outDir,off_row,off_col,Nrow,Ncol)
|
||||
tp.write_bin_to_tif(outDir,outDir)
|
||||
print('done')
|
|
@ -1,104 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project:__init__.py
|
||||
@File:pspLeeRefinedFilterT3.py
|
||||
@Function: Cloude-Pottier eigenvector/eigenvalue based decomposition of a 3x3 coherency matrix [T3]
|
||||
(Averaging using a sliding window)
|
||||
@Contact:
|
||||
@Author:LVY
|
||||
@Date:2021/10/12 9:06
|
||||
@Version:1.0.0
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
logger = logging.getLogger("mylog")
|
||||
|
||||
|
||||
class LeeRefinedFilterT3:
|
||||
"""
|
||||
Run refined Lee filtering via PolSARPro 4.2.0's lee_refined_filter_T3.exe
|
||||
"""
|
||||
|
||||
def __init__(self, exeFilterName='lee_refined_filter_T3.exe'):
|
||||
self.__exeName = exeFilterName
|
||||
pass
|
||||
|
||||
def api_lee_refined_filter_T3(
|
||||
self,
|
||||
exeDir,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
rectX,
|
||||
rectY,
|
||||
rectWidth,
|
||||
rectHeight,
|
||||
Nwin=7,
|
||||
Nlook=1):
|
||||
"""
|
||||
:param exeDir: directory containing the exe (an empty string means the current directory)
|
||||
:param inT3Dir: T3 matrix directory
|
||||
:param outDir: output directory
|
||||
:param rectX: x of the valid region
|
||||
:param rectY: y of the valid region
|
||||
:param rectWidth: width of the valid region
|
||||
:param rectHeight: height of the valid region
|
||||
:param Nwin: filter window size, one of 3 5 7 9 11
|
||||
:param Nlook: number of looks, usually 1
|
||||
"""
|
||||
if len(exeDir) == 0:
|
||||
if not os.path.exists(self.__exeName):
|
||||
raise Exception(self.__exeName + ' not exists.')
|
||||
exePath = self.__exeName
|
||||
else:
|
||||
if not os.path.exists(exeDir + '\\' + self.__exeName):
|
||||
raise Exception(
|
||||
exeDir +
|
||||
'\\' +
|
||||
self.__exeName +
|
||||
' not exists.')
|
||||
exePath = exeDir + '\\' + self.__exeName
|
||||
|
||||
if not self._checkT3Matrix(inT3Dir):
|
||||
raise Exception('T3 Matrix check failed.')
|
||||
if not os.path.exists(outDir):
|
||||
os.makedirs(outDir)
|
||||
if (Nwin % 2) == 0 or Nwin < 0:  # fall back to the default window if Nwin is even or negative
|
||||
Nwin = 7
|
||||
|
||||
Off_lig = rectX
|
||||
Off_col = rectY
|
||||
Sub_Nlig = rectWidth
|
||||
Sub_Ncol = rectHeight
|
||||
|
||||
para_list = [
|
||||
exePath,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
Nlook,
|
||||
Nwin,
|
||||
Off_lig,
|
||||
Off_col,
|
||||
Sub_Nlig,
|
||||
Sub_Ncol]
|
||||
cmd = ' '.join(str(i) for i in para_list)
|
||||
config_path = os.path.join(inT3Dir, 'config.txt')
|
||||
shutil.copyfile(config_path, os.path.join(outDir, 'config.txt'))
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
@staticmethod
|
||||
def _checkT3Matrix(T3Dir):
|
||||
# verify the T3 matrix directory
|
||||
if not os.path.exists(T3Dir):
|
||||
return False
|
||||
file_name_in_out = ['T11.bin', 'T12_real.bin', 'T12_imag.bin',
|
||||
'T13_real.bin', 'T13_imag.bin', 'T22.bin',
|
||||
'T23_real.bin', 'T23_imag.bin', 'T33.bin']
|
||||
for item in file_name_in_out:
|
||||
if not os.path.exists(T3Dir + "\\" + item):
|
||||
return False
|
||||
return True
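# Usage sketch (hypothetical paths and extents; an empty exeDir means the exe
# sits in the working directory):
#   tp = LeeRefinedFilterT3()
#   tp.api_lee_refined_filter_T3('', r'E:\T3_in', r'E:\T3_out', 0, 0, 666, 746)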
|
|
@ -1,393 +0,0 @@
|
|||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
|
||||
logger = logging.getLogger("mylog")
|
||||
|
||||
|
||||
class SurfaceInversionDubois:
|
||||
"""
|
||||
Run soil-moisture inversion via PolSARPro 4.2.0's surface_inversion_dubois.exe
|
||||
"""
|
||||
|
||||
def __init__(self, exeFilterName='surface_inversion_dubois.exe'):
|
||||
self.__exeName = exeFilterName
|
||||
pass
|
||||
|
||||
def api_surface_inversion_dubois(
|
||||
self,
|
||||
exeDir,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
incidence,
|
||||
rectX,
|
||||
rectY,
|
||||
row,
|
||||
col,
|
||||
frequency, # GHZ
|
||||
angleFlag, # 0:deg, 1:rad
|
||||
):
|
||||
"""
|
||||
:param exeDir: directory containing the exe (an empty string means the current directory)
|
||||
:param inT3Dir: T3 matrix directory
|
||||
:param outDir: output directory
|
||||
:param incidence: incidence-angle input passed to the exe's -ang option
|
||||
:param rectX: x offset of the valid region
|
||||
:param rectY: y offset of the valid region
|
||||
:param row: number of rows to process
|
||||
:param col: number of columns to process
|
||||
:param frequency: radar frequency in GHz; angleFlag: 0 for degrees, 1 for radians
|
||||
"""
|
||||
if len(exeDir) == 0:
|
||||
if not os.path.exists(self.__exeName):
|
||||
raise Exception(self.__exeName + ' not exists.')
|
||||
exePath = self.__exeName
|
||||
else:
|
||||
if not os.path.exists(exeDir + '\\' + self.__exeName):
|
||||
raise Exception(
|
||||
exeDir +
|
||||
'\\' +
|
||||
self.__exeName +
|
||||
' not exists.')
|
||||
exePath = exeDir + '\\' + self.__exeName
|
||||
|
||||
if not self._checkT3Matrix(inT3Dir):
|
||||
raise Exception('T3 Matrix check failed.')
|
||||
if not os.path.exists(outDir):
|
||||
os.makedirs(outDir)
|
||||
|
||||
Off_lig = rectX
|
||||
Off_col = rectY
|
||||
Sub_Nlig = row
|
||||
Sub_Ncol = col
|
||||
dataFormat = 'T3'
|
||||
calibration_flag = 1
|
||||
calibration_coefficient = 0.0
|
||||
threshold_HHHH_VVVV = 0.0
|
||||
threshold_HVHV_VVVV = 0.0
|
||||
|
||||
para_list = [
|
||||
exePath,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
dataFormat,
|
||||
incidence,
|
||||
Off_lig,
|
||||
Off_col,
|
||||
Sub_Nlig,
|
||||
Sub_Ncol,
|
||||
frequency, # GHZ
|
||||
angleFlag,
|
||||
]
|
||||
|
||||
cmd = "surface_inversion_dubois.exe -id {} -od {} -iodf {} -ang {} -ofr {} -ofc {} -fnr {} -fnc {} -fr {} -un {} -caf {} -cac {} -th1 {} -th2 {}".format(
|
||||
inT3Dir, outDir, dataFormat, incidence, Off_lig, Off_col, Sub_Nlig, Sub_Ncol, frequency, angleFlag,
|
||||
calibration_flag, calibration_coefficient, threshold_HHHH_VVVV, threshold_HVHV_VVVV)
|
||||
|
||||
logger.info('surface_inversion_dubois:{}'.format(cmd))
|
||||
result = os.system(cmd)
|
||||
logger.info('cmd_result:{}'.format(result))
|
||||
logger.info('surface_inversion_dubois finish!')
|
||||
|
||||
config_path = os.path.join(inT3Dir, 'config.txt')
|
||||
shutil.copyfile(config_path, os.path.join(outDir, 'config.txt'))
|
||||
|
||||
# cmd = ' '.join(str(i) for i in para_list)
|
||||
# config_path = os.path.join(inT3Dir, 'config.txt')
|
||||
# shutil.copyfile(config_path, os.path.join(outDir, 'config.txt'))
|
||||
# result_tuple = subprocess.getstatusoutput(cmd)
|
||||
#
|
||||
# if result_tuple[0] != 1 or result_tuple[1].find('error') != -1:
|
||||
# raise Exception(result_tuple[1])
|
||||
@staticmethod
|
||||
def _checkT3Matrix(T3Dir):
|
||||
# verify the T3 matrix directory
|
||||
if not os.path.exists(T3Dir):
|
||||
return False
|
||||
file_name_in_out = ['T11.bin', 'T12_real.bin', 'T12_imag.bin',
|
||||
'T13_real.bin', 'T13_imag.bin', 'T22.bin',
|
||||
'T23_real.bin', 'T23_imag.bin', 'T33.bin']
|
||||
for item in file_name_in_out:
|
||||
if not os.path.exists(T3Dir + "\\" + item):
|
||||
return False
|
||||
return True
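# Usage sketch (hypothetical paths and values; the incidence file and the
# 5.4 GHz frequency are assumptions, not repository defaults):
#   inv = SurfaceInversionDubois()
#   inv.api_surface_inversion_dubois('', r'E:\T3_in', r'E:\dubois_out',
#                                    r'E:\incidence.bin', 0, 0, 666, 746,
#                                    5.4, 1)  # frequency in GHz, angles in rad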
|
||||
|
||||
|
||||
class SurfaceInversionHisto:
|
||||
"""
|
||||
Run soil-moisture inversion via PolSARPro 4.2.0's surface_inversion_histo.exe
|
||||
"""
|
||||
|
||||
def __init__(self, exeFilterName='surface_inversion_histo.exe'):
|
||||
self.__exeName = exeFilterName
|
||||
pass
|
||||
|
||||
def api_surface_inversion_histo(
|
||||
self,
|
||||
exeDir,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
rectX,
|
||||
rectY,
|
||||
rectWidth,
|
||||
rectHeight,
|
||||
Nwin=7,
|
||||
Nlook=1):
|
||||
"""
|
||||
:param exeDir: directory containing the exe (an empty string means the current directory)
|
||||
:param inT3Dir: T3 matrix directory
|
||||
:param outDir: output directory
|
||||
:param rectX: x of the valid region
|
||||
:param rectY: y of the valid region
|
||||
:param rectWidth: width of the valid region
|
||||
:param rectHeight: height of the valid region
|
||||
:param Nwin: filter window size, one of 3 5 7 9 11
|
||||
:param Nlook: number of looks, usually 1
|
||||
"""
|
||||
if len(exeDir) == 0:
|
||||
if not os.path.exists(self.__exeName):
|
||||
raise Exception(self.__exeName + ' not exists.')
|
||||
exePath = self.__exeName
|
||||
else:
|
||||
if not os.path.exists(exeDir + '\\' + self.__exeName):
|
||||
raise Exception(
|
||||
exeDir +
|
||||
'\\' +
|
||||
self.__exeName +
|
||||
' not exists.')
|
||||
exePath = exeDir + '\\' + self.__exeName
|
||||
|
||||
if not self._checkT3Matrix(inT3Dir):
|
||||
raise Exception('T3 Matrix check failed.')
|
||||
if not os.path.exists(outDir):
|
||||
os.makedirs(outDir)
|
||||
if (Nwin % 2) == 0 or Nwin < 0:  # fall back to the default window if Nwin is even or negative
|
||||
Nwin = 7
|
||||
|
||||
Off_lig = rectX
|
||||
Off_col = rectY
|
||||
Sub_Nlig = rectWidth
|
||||
Sub_Ncol = rectHeight
|
||||
|
||||
para_list = [
|
||||
exePath,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
Nlook,
|
||||
Nwin,
|
||||
Off_lig,
|
||||
Off_col,
|
||||
Sub_Nlig,
|
||||
Sub_Ncol]
|
||||
cmd = ' '.join(str(i) for i in para_list)
|
||||
config_path = os.path.join(inT3Dir, 'config.txt')
|
||||
shutil.copyfile(config_path, os.path.join(outDir, 'config.txt'))
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
@staticmethod
|
||||
def _checkT3Matrix(T3Dir):
|
||||
# verify the T3 matrix directory
|
||||
if not os.path.exists(T3Dir):
|
||||
return False
|
||||
file_name_in_out = ['T11.bin', 'T12_real.bin', 'T12_imag.bin',
|
||||
'T13_real.bin', 'T13_imag.bin', 'T22.bin',
|
||||
'T23_real.bin', 'T23_imag.bin', 'T33.bin']
|
||||
for item in file_name_in_out:
|
||||
if not os.path.exists(T3Dir + "\\" + item):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class SurfaceInversionOh:
|
||||
"""
|
||||
Run soil-moisture inversion via PolSARPro 4.2.0's surface_inversion_oh.exe
|
||||
"""
|
||||
|
||||
def __init__(self, exeFilterName='surface_inversion_oh.exe'):
|
||||
self.__exeName = exeFilterName
|
||||
pass
|
||||
|
||||
def api_surface_inversion_oh(
|
||||
self,
|
||||
exeDir,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
rectX,
|
||||
rectY,
|
||||
rectWidth,
|
||||
rectHeight,
|
||||
Nwin=7,
|
||||
Nlook=1):
|
||||
"""
|
||||
:param exeDir: directory containing the exe (an empty string means the current directory)
|
||||
:param inT3Dir: T3 matrix directory
|
||||
:param outDir: output directory
|
||||
:param rectX: x of the valid region
|
||||
:param rectY: y of the valid region
|
||||
:param rectWidth: width of the valid region
|
||||
:param rectHeight: height of the valid region
|
||||
:param Nwin: filter window size, one of 3 5 7 9 11
|
||||
:param Nlook: number of looks, usually 1
|
||||
"""
|
||||
if len(exeDir) == 0:
|
||||
if not os.path.exists(self.__exeName):
|
||||
raise Exception(self.__exeName + ' not exists.')
|
||||
exePath = self.__exeName
|
||||
else:
|
||||
if not os.path.exists(exeDir + '\\' + self.__exeName):
|
||||
raise Exception(
|
||||
exeDir +
|
||||
'\\' +
|
||||
self.__exeName +
|
||||
' not exists.')
|
||||
exePath = exeDir + '\\' + self.__exeName
|
||||
|
||||
if not self._checkT3Matrix(inT3Dir):
|
||||
raise Exception('T3 Matrix check failed.')
|
||||
if not os.path.exists(outDir):
|
||||
os.makedirs(outDir)
|
||||
if (Nwin % 2) == 0 or Nwin < 0:  # fall back to the default window if Nwin is even or negative
|
||||
Nwin = 7
|
||||
|
||||
Off_lig = rectX
|
||||
Off_col = rectY
|
||||
Sub_Nlig = rectWidth
|
||||
Sub_Ncol = rectHeight
|
||||
|
||||
para_list = [
|
||||
exePath,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
Nlook,
|
||||
Nwin,
|
||||
Off_lig,
|
||||
Off_col,
|
||||
Sub_Nlig,
|
||||
Sub_Ncol]
|
||||
cmd = ' '.join(str(i) for i in para_list)
|
||||
config_path = os.path.join(inT3Dir, 'config.txt')
|
||||
shutil.copyfile(config_path, os.path.join(outDir, 'config.txt'))
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
@staticmethod
|
||||
def _checkT3Matrix(T3Dir):
|
||||
# verify the T3 matrix directory
|
||||
if not os.path.exists(T3Dir):
|
||||
return False
|
||||
file_name_in_out = ['T11.bin', 'T12_real.bin', 'T12_imag.bin',
|
||||
'T13_real.bin', 'T13_imag.bin', 'T22.bin',
|
||||
'T23_real.bin', 'T23_imag.bin', 'T33.bin']
|
||||
for item in file_name_in_out:
|
||||
if not os.path.exists(T3Dir + "\\" + item):
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
class SurfaceInversionOh2004:
|
||||
"""
|
||||
Run soil-moisture inversion via PolSARPro 4.2.0's surface_inversion_oh2004.exe
|
||||
"""
|
||||
|
||||
def __init__(self, exeFilterName='surface_inversion_oh2004.exe'):
|
||||
self.__exeName = exeFilterName
|
||||
pass
|
||||
|
||||
def api_surface_inversion_oh2004(
|
||||
self,
|
||||
exeDir,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
incidence,
|
||||
rectY,
|
||||
rectX,
|
||||
row,
|
||||
col,
|
||||
frequency, # GHZ
|
||||
angleFlag):
|
||||
"""
|
||||
:param exeDir: directory containing the exe (an empty string means the current directory)
|
||||
:param inT3Dir: T3 matrix directory
|
||||
:param outDir: output directory
|
||||
:param incidence: incidence-angle input passed to the exe's -ang option
|
||||
:param rectY: y offset of the valid region
|
||||
:param rectX: x offset of the valid region
|
||||
:param row: number of rows to process
|
||||
:param col: number of columns to process
|
||||
:param frequency: radar frequency in GHz; angleFlag: 0 for degrees, 1 for radians
|
||||
"""
|
||||
if len(exeDir) == 0:
|
||||
if not os.path.exists(self.__exeName):
|
||||
raise Exception(self.__exeName + ' not exists.')
|
||||
exePath = self.__exeName
|
||||
else:
|
||||
if not os.path.exists(exeDir + '\\' + self.__exeName):
|
||||
raise Exception(
|
||||
exeDir +
|
||||
'\\' +
|
||||
self.__exeName +
|
||||
' not exists.')
|
||||
exePath = exeDir + '\\' + self.__exeName
|
||||
|
||||
if not self._checkT3Matrix(inT3Dir):
|
||||
raise Exception('T3 Matrix check failed.')
|
||||
if not os.path.exists(outDir):
|
||||
os.makedirs(outDir)
|
||||
|
||||
Off_lig = rectX
|
||||
Off_col = rectY
|
||||
Sub_Nlig = row
|
||||
Sub_Ncol = col
|
||||
dataFormat = 'T3'
|
||||
threshold_mv = 1.0
|
||||
threshold_s = 7.0
|
||||
|
||||
para_list = [
|
||||
exePath,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
dataFormat,
|
||||
incidence,
|
||||
Off_lig,
|
||||
Off_col,
|
||||
Sub_Nlig,
|
||||
Sub_Ncol,
|
||||
frequency, # GHZ
|
||||
angleFlag,
|
||||
threshold_mv,
|
||||
threshold_s]
|
||||
cmd = "surface_inversion_oh2004.exe -id {} -od {} -iodf {} -ang {} -ofr {} -ofc {} -fnr {} -fnc {} -fr {} -un {} -th1 {} -th2 {}".format(
|
||||
inT3Dir, outDir, dataFormat, incidence, Off_lig, Off_col, Sub_Nlig, Sub_Ncol, frequency, angleFlag, threshold_mv, threshold_s)
|
||||
|
||||
logger.info('surface_inversion_oh2004:{}'.format(cmd))
|
||||
result = os.system(cmd)
|
||||
logger.info('cmd_result:{}'.format(result))
|
||||
logger.info('surface_inversion_oh2004 finish!')
|
||||
|
||||
|
||||
config_path = os.path.join(inT3Dir, 'config.txt')
|
||||
shutil.copyfile(config_path, os.path.join(outDir, 'config.txt'))
|
||||
# cmd = ' '.join(str(i) for i in para_list)
|
||||
# result_tuple = subprocess.getstatusoutput(cmd)
|
||||
# #
|
||||
# if result_tuple[0] != 1 or result_tuple[1].find('error') != -1:
|
||||
# raise Exception(result_tuple[1])
|
||||
@staticmethod
|
||||
def _checkT3Matrix(T3Dir):
|
||||
# verify the T3 matrix directory
|
||||
if not os.path.exists(T3Dir):
|
||||
return False
|
||||
file_name_in_out = ['T11.bin', 'T12_real.bin', 'T12_imag.bin',
|
||||
'T13_real.bin', 'T13_imag.bin', 'T22.bin',
|
||||
'T23_real.bin', 'T23_imag.bin', 'T33.bin']
|
||||
# file_name_in_out = ['T11.img', 'T12_real.img', 'T12_imag.img',
|
||||
# 'T13_real.img', 'T13_imag.img', 'T22.img',
|
||||
# 'T23_real.img', 'T23_imag.img', 'T33.img']
|
||||
for item in file_name_in_out:
|
||||
if not os.path.exists(T3Dir + "\\" + item):
|
||||
return False
|
||||
return True
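# Usage sketch (hypothetical paths and values, analogous to the Dubois call
# above; note the rectY-before-rectX argument order of this method):
#   inv = SurfaceInversionOh2004()
#   inv.api_surface_inversion_oh2004('', r'E:\T3_in', r'E:\oh2004_out',
#                                    r'E:\incidence.bin', 0, 0, 666, 746, 5.4, 1)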
|
|
@ -1,146 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project:__init__.py
|
||||
@File:pspTouziDecomposition.py
|
||||
@Function:
|
||||
@Contact:
|
||||
@Author:LVY
|
||||
@Date:2021/10/14 10:11
|
||||
@Version:1.0.0
|
||||
"""
|
||||
import os
|
||||
import logging
|
||||
from tool.algorithm.polsarpro.polarizationDecomposition import ModTouzi as TouziDecomp
|
||||
from osgeo import gdal
|
||||
import multiprocessing
|
||||
from tool.algorithm.block.blockprocess import BlockProcess
|
||||
import shutil
|
||||
from tool.algorithm.image.ImageHandle import ImageHandler
|
||||
from tool.file.fileHandle import fileHandle
|
||||
logger = logging.getLogger("mylog")
|
||||
file =fileHandle(False)
|
||||
|
||||
class PspTouziDecomposition:
|
||||
"""
|
||||
Touzi decomposition
|
||||
"""
|
||||
def __init__(self, inDic, outDir):
|
||||
"""
|
||||
:param inDic: dict of polarization tif paths keyed 'HH', 'HV', 'VH', 'VV'
|
||||
:param outDir: output directory
|
||||
"""
|
||||
self.__inDic = inDic
|
||||
self.__outDir = outDir
|
||||
self.__DecompostFlag = False
|
||||
if self._checkTifFileDic(self.__inDic) is False:
|
||||
raise Exception('input tif paths check failed!')  # __init__ must not return a value
|
||||
if not os.path.exists(self.__outDir):
|
||||
os.makedirs(self.__outDir)
|
||||
|
||||
def api_Touzi_decomposition_TIF(self, Nwin = 5):
|
||||
"""
|
||||
:param Nwin: filter window size, one of 3 5 7 9 11
|
||||
"""
|
||||
bandHH = gdal.Open(self.__inDic["HH"])
|
||||
bandHV = gdal.Open(self.__inDic["HV"])
|
||||
bandVH = gdal.Open(self.__inDic["VH"])
|
||||
bandVV = gdal.Open(self.__inDic["VV"])
|
||||
bandAll = [bandHH, bandHV, bandVH, bandVV]
|
||||
decomposition = TouziDecomp(bandAll, Nwin)
|
||||
decomposition.get_result(self.__outDir)
|
||||
return True
|
||||
|
||||
def Touzi_decomposition_TIF(self,hh_path,hv_path,vh_path,vv_path,out_dir,suffix,Nwin = 5):
|
||||
"""
|
||||
:param Nwin: filter window size, one of 3 5 7 9 11
|
||||
"""
|
||||
bandHH = gdal.Open(hh_path)
|
||||
bandHV = gdal.Open(hv_path)
|
||||
bandVH = gdal.Open(vh_path)
|
||||
bandVV = gdal.Open(vv_path)
|
||||
bandAll = [bandHH, bandHV, bandVH, bandVV]
|
||||
decomposition = TouziDecomp(bandAll, Nwin)
|
||||
decomposition.get_result_block(out_dir, suffix)
|
||||
return True
|
||||
|
||||
@staticmethod
|
||||
def _checkTifFileDic(inDic):
|
||||
file_name_in_out = ['HH', 'VV', 'HV', 'VH']
|
||||
for item in file_name_in_out:
|
||||
if item in inDic:
|
||||
print(inDic[item])
|
||||
if not os.path.exists(os.path.join(inDic[item])):
|
||||
return False
|
||||
else:
|
||||
return False
|
||||
return True
|
||||
|
||||
def Touzi_decomposition_multiprocessing(self):
|
||||
|
||||
# create the working directories
|
||||
src_path = os.path.join(self.__outDir, "src_img")
|
||||
block_path = os.path.join(self.__outDir, "block")
|
||||
decomposition_path = os.path.join(self.__outDir, "feature")
|
||||
file.creat_dirs([src_path,block_path,decomposition_path])
|
||||
|
||||
shutil.copyfile(self.__inDic["HH"], os.path.join(src_path, "HH.tif"))
|
||||
shutil.copyfile(self.__inDic["HV"], os.path.join(src_path, "HV.tif"))
|
||||
shutil.copyfile(self.__inDic["VH"], os.path.join(src_path, "VH.tif"))
|
||||
shutil.copyfile(self.__inDic["VV"], os.path.join(src_path, "VV.tif"))
|
||||
self.__cols = ImageHandler.get_img_width(self.__inDic["HH"])
|
||||
self.__rows = ImageHandler.get_img_height(self.__inDic["HH"])
|
||||
# split into blocks
|
||||
bp = BlockProcess()
|
||||
block_size = bp.get_block_size(self.__rows, self.__cols)
|
||||
bp.cut(src_path, block_path, ['tif', 'tiff'], 'tif', block_size)
|
||||
logger.info('blocking tifs success!')
|
||||
|
||||
img_dir, img_name = bp.get_file_names(block_path, ['tif'])
|
||||
dir_dict = bp.get_same_img(img_dir, img_name)
|
||||
|
||||
hh_list, vv_list, hv_list, vh_list = None, None, None, None
|
||||
for key in dir_dict.keys():
|
||||
tmp = key.split('_', 2)[0]
|
||||
if tmp == 'HH':
|
||||
hh_list = dir_dict[key]
|
||||
elif tmp == 'VV':
|
||||
vv_list = dir_dict[key]
|
||||
elif tmp == 'HV':
|
||||
hv_list = dir_dict[key]
|
||||
elif tmp == 'VH':
|
||||
vh_list = dir_dict[key]
|
||||
|
||||
processes_num = min([len(hh_list), multiprocessing.cpu_count() - 1])
|
||||
|
||||
# process the blocks in parallel
|
||||
pool = multiprocessing.Pool(processes=processes_num)
|
||||
|
||||
for i in range(len(hh_list)):
|
||||
suffix = bp.get_suffix(os.path.basename(hh_list[i]))
|
||||
# self.Touzi_decomposition_TIF(hh_list[i], hv_list[i], vh_list[i], vv_list[i], block_path, suffix,5)
|
||||
pool.apply_async(self.Touzi_decomposition_TIF, (hh_list[i], hv_list[i], vh_list[i], vv_list[i], decomposition_path, suffix,5))
|
||||
logger.info('total:%s, block:%s touzi!', len(hh_list), i)
|
||||
|
||||
pool.close()
|
||||
pool.join()
|
||||
# merge the processed blocks
|
||||
bp.combine(decomposition_path, self.__cols, self.__rows, self.__outDir, file_type=['tif'], datetype='float16')
|
||||
|
||||
file.del_folder(src_path)
|
||||
file.del_folder(block_path)
|
||||
file.del_folder(decomposition_path)
|
||||
pass
|
||||
|
||||
|
||||
# if __name__ == '__main__':
|
||||
# dir = {}
|
||||
# dir.update({"HH":"I:\preprocessed\HH_preprocessed.tif"})
|
||||
# dir.update({"HV":"I:\preprocessed\HV_preprocessed.tif"})
|
||||
# dir.update({"VH":"I:\preprocessed\VH_preprocessed.tif"})
|
||||
# dir.update({"VV":"I:\preprocessed\VV_preprocessed.tif"})
|
||||
#
|
||||
#
|
||||
# p = PspTouziDecomposition(dir, "I:/preprocessed/")
|
||||
# p.Touzi_decomposition_multiprocessing()
|
||||
# pass
|
||||
|
|
@ -1,104 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project:__init__.py
|
||||
@File:pspFreemanDecomposition.py
|
||||
@Function:
|
||||
@Contact:
|
||||
@Author:LVY
|
||||
@Date:2021/10/12 18:45
|
||||
@Version:1.0.0
|
||||
"""
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import logging
|
||||
logger = logging.getLogger("mylog")
|
||||
|
||||
|
||||
class PspYamaguchiDecomposition:
|
||||
"""
|
||||
Yamaguchi decomposition via yamaguchi_3components_decomposition_T3.exe / yamaguchi_4components_decomposition_T3.exe
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
exeDir,
|
||||
inT3Dir,
|
||||
outDir,
|
||||
exeDecomposeName='yamaguchi_4components_decomposition_T3.exe'):
|
||||
"""
|
||||
:param exeDir: directory containing the exe (an empty string means the current directory)
|
||||
:param inT3Dir: T3 matrix directory
|
||||
:param outDir: output directory
|
||||
"""
|
||||
self.__exeName = exeDecomposeName
|
||||
self.__exeDir = exeDir
|
||||
self.__inT3Dir = inT3Dir
|
||||
self.__outDir = outDir
|
||||
self.__DecompostFlag = False
|
||||
pass
|
||||
|
||||
def api_yamaguchi_4components_decomposition_T3(
|
||||
self, rectX, rectY, rectWidth, rectHeight, Nwin=1):
|
||||
"""
|
||||
:param rectX: x of the valid region
|
||||
:param rectY: y of the valid region
|
||||
:param rectWidth: width of the valid region
|
||||
:param rectHeight: height of the valid region
|
||||
:param Nwin :Size of the (Nwin, Nwin) sliding window used to compute local estimates. (int)
|
||||
"""
|
||||
if self.__DecompostFlag:
|
||||
return True
|
||||
if len(self.__exeDir) == 0:
|
||||
if not os.path.exists(self.__exeName):
|
||||
logger.error(self.__exeName + ' not exists.')
|
||||
return False
|
||||
exePath = self.__exeName
|
||||
else:
|
||||
if not os.path.exists(self.__exeDir + '\\' + self.__exeName):
|
||||
logger.error(self.__exeName + ' not exists.')
|
||||
return False
|
||||
exePath = self.__exeDir + '\\' + self.__exeName
|
||||
|
||||
if not self._checkT3Matrix(self.__inT3Dir):
|
||||
logger.error('T3 Matrix check failed.')
|
||||
return False
|
||||
if not os.path.exists(self.__outDir):
|
||||
os.makedirs(self.__outDir)
|
||||
|
||||
Off_lig = rectX
|
||||
Off_col = rectY
|
||||
Sub_Nlig = rectWidth
|
||||
Sub_Ncol = rectHeight
|
||||
|
||||
para_list = [
|
||||
exePath,
|
||||
self.__inT3Dir,
|
||||
self.__outDir,
|
||||
Nwin,
|
||||
Off_lig,
|
||||
Off_col,
|
||||
Sub_Nlig,
|
||||
Sub_Ncol]
|
||||
cmd = " ".join(str(i) for i in para_list)
|
||||
config_path = os.path.join(self.__inT3Dir, 'config.txt')
|
||||
shutil.copyfile(config_path, os.path.join(self.__outDir, 'config.txt'))
|
||||
result_tuple = subprocess.getstatusoutput(cmd)
|
||||
|
||||
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1:
|
||||
raise Exception(result_tuple[1])
|
||||
self.__DecompostFlag = True
|
||||
return True
|
||||
@staticmethod
|
||||
def _checkT3Matrix(T3Dir):
|
||||
# 检测T3矩阵
|
||||
if not os.path.exists(T3Dir):
|
||||
return False
|
||||
file_name_in_out = ['T11.bin', 'T12_real.bin', 'T12_imag.bin',
|
||||
'T13_real.bin', 'T13_imag.bin', 'T22.bin',
|
||||
'T23_real.bin', 'T23_imag.bin', 'T33.bin']
|
||||
for item in file_name_in_out:
|
||||
if not os.path.exists(T3Dir + "\\" + item):
|
||||
return False
|
||||
return True
|
Binary file not shown.
|
@ -1,194 +0,0 @@
|
|||
import os
|
||||
cimport cython  # required import
|
||||
import numpy as np  # numpy must be declared at both the Python level ...
|
||||
cimport numpy as np  # ... and the C level
|
||||
from libc.math cimport pi,ceil,floor
|
||||
from scipy.interpolate import griddata
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#####################
|
||||
# struct definitions
|
||||
####################
|
||||
cdef struct Point:  # a 2-D point
|
||||
double x
|
||||
double y
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
######################
|
||||
# ray casting (point-in-polygon test)
|
||||
######################
|
||||
cdef int rayCasting(Point p,np.ndarray[double,ndim=2] poly):
|
||||
cdef double px = p.x
|
||||
cdef double py = p.y
|
||||
cdef int flag = 0
|
||||
|
||||
cdef int i=0
|
||||
cdef int l=poly.shape[0]
|
||||
cdef int j=l-1
|
||||
|
||||
cdef double sx
|
||||
cdef double sy
|
||||
cdef double tx
|
||||
cdef double ty
|
||||
cdef double x=0  # X coordinate of the edge/ray intersection (the untyped 'cdef x' is not valid Cython)
|
||||
while(i<l):
|
||||
sx=poly[i,0]
|
||||
sy=poly[i,1]
|
||||
tx=poly[j,0]
|
||||
ty=poly[j,1]
|
||||
# the point coincides with a polygon vertex
|
||||
if((sx == px and sy == py) or (tx == px and ty == py)):
|
||||
return 1
|
||||
#// check whether the edge endpoints straddle the horizontal ray
|
||||
if((sy < py and ty >= py) or (sy >= py and ty < py)) :
|
||||
#// X coordinate of the edge point whose Y equals the ray's Y
|
||||
x = sx + (py - sy) * (tx - sx) / (ty - sy)
|
||||
#// the point lies on a polygon edge
|
||||
if(x == px):
|
||||
return 1
|
||||
|
||||
#// the ray crosses the polygon boundary
|
||||
if(x > px):
|
||||
flag = 0 if flag==1 else 1
|
||||
# advance the loop
|
||||
j=i
|
||||
i=i+1
|
||||
|
||||
#// the point is inside when the ray crosses the boundary an odd number of times
|
||||
return 1 if flag==1 else 0
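# Worked example of the test (hypothetical values): for the unit square
#   poly = np.array([[0.0, 0.0], [1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
# a Point at (0.5, 0.5) gives rayCasting(p, poly) == 1 (inside), while a Point
# at (2.0, 0.5) gives 0 (outside); boundary points return 1 as well.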
|
||||
|
||||
cpdef np.ndarray[double,ndim=2] insert_data(np.ndarray[double,ndim=2] ori2geo_img,np.ndarray[int , ndim=1] row_ids,np.ndarray[int,ndim=1] col_ids,np.ndarray[double,ndim=1] data):
|
||||
cdef int i=0
|
||||
cdef int count=row_ids.shape[0]
|
||||
while i<count:
|
||||
ori2geo_img[row_ids[i],col_ids[i]]=data[i]
|
||||
i=i+1
|
||||
return ori2geo_img
|
||||
|
||||
cpdef np.ndarray[double,ndim=2] cut_L1A_img(np.ndarray[double,ndim=3] ori2geo_img,np.ndarray[double,ndim=2] roi_list):
|
||||
""" 根据roi 获取栅格对象
|
||||
"""
|
||||
cdef int height=ori2geo_img.shape[1]
|
||||
cdef int width=ori2geo_img.shape[2]
|
||||
cdef int i=0
|
||||
cdef int j=0
|
||||
cdef Point temp_p
|
||||
cdef np.ndarray[double,ndim=2] mask=np.zeros((height,width),dtype=np.float64)
|
||||
while i<height:
|
||||
j=0
|
||||
while j<width:
|
||||
temp_p.x=ori2geo_img[0,i,j]  # longitude of pixel (i, j)
|
||||
temp_p.y=ori2geo_img[1,i,j]  # latitude of pixel (i, j)
|
||||
if rayCasting(temp_p,roi_list)==1:
|
||||
mask[i,j]=1
|
||||
else:
|
||||
mask[i,j]=np.nan
|
||||
j=j+1
|
||||
i=i+1
|
||||
return mask
|
||||
|
||||
cpdef np.ndarray[double,ndim=2] gereratorMask(np.ndarray[double,ndim=1] rlist,np.ndarray[double,ndim=1] clist,np.ndarray[double,ndim=2] mask):
|
||||
cdef int rcount=rlist.shape[0]
|
||||
cdef int ccount=clist.shape[0]
|
||||
cdef int count=rcount if rcount<ccount else ccount
|
||||
cdef int i=0
|
||||
cdef int j=0
|
||||
cdef int temp_row=0
|
||||
cdef int temp_col=0
|
||||
cdef int height=mask.shape[0]
|
||||
cdef int width=mask.shape[1]
|
||||
while i<count:
|
||||
|
||||
# 1
|
||||
temp_row=int(ceil(rlist[i]))
|
||||
temp_col=int(ceil(clist[i]))
|
||||
|
||||
if temp_row>=0 and temp_col>=0 and temp_row<height and temp_col<width:
|
||||
mask[temp_row,temp_col]=1
|
||||
|
||||
# 2
|
||||
temp_row=int(floor(rlist[i]))
|
||||
temp_col=int(ceil(clist[i]))
|
||||
if temp_row>=0 and temp_col>=0 and temp_row<height and temp_col<width:
|
||||
mask[temp_row,temp_col]=1
|
||||
|
||||
# 3
|
||||
temp_row=int(ceil(rlist[i]))
|
||||
temp_col=int(floor(clist[i]))
|
||||
if temp_row>=0 and temp_col>=0 and temp_row<height and temp_col<width:
|
||||
mask[temp_row,temp_col]=1
|
||||
# 4
|
||||
temp_row=int(floor(rlist[i]))
|
||||
temp_col=int(floor(clist[i]))
|
||||
if temp_row>=0 and temp_col>=0 and temp_row<height and temp_col<width:
|
||||
mask[temp_row,temp_col]=1
|
||||
i=i+1
|
||||
|
||||
return mask
|
||||
|
||||
|
||||
cdef double distance_powe(Point p1,Point p2):
|
||||
return (p1.x-p2.x)**2+(p1.y-p2.y)**2
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
cpdef np.ndarray[int,ndim=2] get_r_c(np.ndarray[double,ndim=3] ori2geo,np.ndarray[double,ndim=2] lon_lat):
|
||||
cdef int p_count=lon_lat.shape[0]
|
||||
cdef int height=ori2geo.shape[1]
|
||||
cdef int width=ori2geo.shape[2]
|
||||
cdef int i=0
|
||||
cdef int j=0
|
||||
cdef int c=0
|
||||
cdef double dist=999
|
||||
cdef double temp_dist=0
|
||||
cdef Point p1
|
||||
cdef Point p2
|
||||
cdef int min_i
|
||||
cdef int min_j
|
||||
cdef np.ndarray[double,ndim=2] result=np.ones((p_count,2))*-1
|
||||
|
||||
|
||||
# lon/lat bounds of the geocoded grid
|
||||
cdef double min_lon=np.min(ori2geo[0,:,:])
|
||||
cdef double max_lon=np.max(ori2geo[0,:,:])
|
||||
cdef double min_lat=np.min(ori2geo[1,:,:])
|
||||
cdef double max_lat=np.max(ori2geo[1,:,:])
|
||||
while c<p_count:
|
||||
p1.x=lon_lat[c,0]
|
||||
p1.y=lon_lat[c,1]
|
||||
if min_lon>p1.x or max_lon<p1.x or p1.y<min_lat or p1.y>max_lat:  # use p1; p2 is never assigned
|
||||
c=c+1  # advance before skipping, otherwise the loop never terminates
|
||||
continue
|
||||
c=c+1
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
# test routine
|
||||
cpdef np.ndarray[double,ndim=2] Add(np.ndarray[double,ndim=2] a,double x):
|
||||
cdef double d=0  # declaration; note: Cython has no Python bool type here
|
||||
print("调用成功")
|
||||
print(a)
|
||||
print(x)
|
||||
return a+x
|
||||
|
File diff suppressed because it is too large
Binary file not shown.
Binary file not shown.
Binary file not shown.
|
@ -1,45 +0,0 @@
|
|||
from setuptools import setup
|
||||
from setuptools.extension import Extension
|
||||
from Cython.Distutils import build_ext
|
||||
from Cython.Build import cythonize
|
||||
import numpy
|
||||
from pathlib import Path
|
||||
import shutil
|
||||
|
||||
|
||||
class MyBuildExt(build_ext):
|
||||
def run(self):
|
||||
build_ext.run(self)
|
||||
|
||||
build_dir = Path(self.build_lib)
|
||||
root_dir = Path(__file__).parent
|
||||
target_dir = build_dir if not self.inplace else root_dir
|
||||
|
||||
self.copy_file(Path('./SAR_geo') / '__init__.py', root_dir, target_dir)
|
||||
#self.copy_file(Path('./pkg2') / '__init__.py', root_dir, target_dir)
|
||||
self.copy_file(Path('.') / '__init__.py', root_dir, target_dir)
|
||||
def copy_file(self, path, source_dir, destination_dir):
|
||||
if not (source_dir / path).exists():
|
||||
return
|
||||
shutil.copyfile(str(source_dir / path), str(destination_dir / path))
|
||||
|
||||
setup(
|
||||
name="MyModule",
|
||||
ext_modules=cythonize(
|
||||
[
|
||||
#Extension("pkg1.*", ["root/pkg1/*.py"]),
|
||||
Extension("pkg2.*", ["./SAR_geo/SAR_GEO.pyx"]),
|
||||
#Extension("1.*", ["root/*.py"])
|
||||
],
|
||||
build_dir="build",
|
||||
compiler_directives=dict(
|
||||
always_allow_keywords=True
|
||||
)),
|
||||
cmdclass=dict(
|
||||
build_ext=MyBuildExt
|
||||
),
|
||||
packages=[],
|
||||
include_dirs=[numpy.get_include()],
|
||||
)
|
||||
|
||||
# command: python setup.py build_ext --inplace
|
File diff suppressed because it is too large
|
@ -1,730 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
@Project :microproduct
|
||||
@File :AlgXmlHandle.py
|
||||
@Function :read, write and validate the algorithm description XML
|
||||
@Contact :https://www.cnblogs.com/feifeifeisir/p/10893127.html
|
||||
@Author :SHJ
|
||||
@Date :2021/9/6
|
||||
@Version :1.0.0
|
||||
"""
|
||||
import logging
|
||||
from xml.etree.ElementTree import ElementTree
|
||||
from tool.algorithm.image.ImageHandle import ImageHandler
|
||||
from tool.file.fileHandle import fileHandle
|
||||
import os
|
||||
import re
|
||||
import platform
|
||||
import psutil
|
||||
import multiprocessing
|
||||
import ctypes
|
||||
logger = logging.getLogger("mylog")
|
||||
import glob
|
||||
|
||||
class ManageAlgXML:
|
||||
"""
|
||||
Validate and read the algorithm XML file
|
||||
"""
|
||||
def __init__(self, xml_path):
|
||||
self.in_path = xml_path
|
||||
self.__tree = ElementTree()
|
||||
self.__root = None
|
||||
self.__alg_compt = None
|
||||
self.__workspace_path = None
|
||||
self.__taskID = None
|
||||
self.__algorithm_name = None
|
||||
self.__envs = {}
|
||||
self.__input_paras = {}
|
||||
self.__output_paras = {}
|
||||
self.__init_flag = False
|
||||
|
||||
|
||||
def init_xml(self):
|
||||
"""
|
||||
Initialize from the XML file
|
||||
:return: True on success (failures raise an exception)
|
||||
"""
|
||||
try:
|
||||
self.__tree.parse(self.in_path)
|
||||
except FileNotFoundError as ex:
|
||||
msg = ex + "xml_path = " + self.in_path
|
||||
raise Exception(msg)
|
||||
except BaseException:
|
||||
raise Exception("cannot open algXMl")
|
||||
|
||||
self.__root = self.__tree.getroot()
|
||||
if self.__root is None:
|
||||
raise Exception("get root failed")
|
||||
|
||||
self.__alg_compt = self.__root.find("AlgCompt")
|
||||
if self.__alg_compt is None:
|
||||
raise Exception("get AlgCompt failed")
|
||||
|
||||
self.__workspace_path = self.__check_workspace_path()
|
||||
if self.__workspace_path is None:
|
||||
raise Exception("check workspace_path failed")
|
||||
|
||||
self.__taskID = self.__check_task_id()
|
||||
if self.__taskID is None:
|
||||
raise Exception("check taskID failed")
|
||||
|
||||
self.__algorithm_name = self.__check_algorithm_name()
|
||||
if self.__algorithm_name is None:
|
||||
raise Exception("check AlgorithmName failed")
|
||||
|
||||
self.__envs = self.__check_environment()
|
||||
if self.__envs is None or self.__envs == {}:
|
||||
raise Exception("check environment failed")
|
||||
|
||||
self.__input_paras = self.__check_input_para()
|
||||
if self.__input_paras is None or self.__input_paras == {}:
|
||||
raise Exception("check input para failed")
|
||||
|
||||
self.__output_paras = self.__check_output_para()
|
||||
|
||||
self.__init_flag = True
|
||||
return True
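# Usage sketch (the XML path and parameter name below are hypothetical):
#   handle = ManageAlgXML(r'D:\task\alg.xml')
#   handle.init_xml()
#   input_paras = handle.get_input_paras()
#   handle.write_out_para('ProductPath', r'D:\task\out.tar.gz')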
|
||||
|
||||
def get_workspace_path(self):
|
||||
"""
|
||||
Get the workspace path
|
||||
:return: the workspace path, or None on error
|
||||
"""
|
||||
if not self.__init_flag:
|
||||
raise Exception("XML is not initialized")
|
||||
return self.__workspace_path
|
||||
|
||||
def get_task_id(self):
|
||||
"""
|
||||
Get the task ID
|
||||
:return: taskID, or None on error
|
||||
"""
|
||||
if not self.__init_flag:
|
||||
raise Exception("XML is not initialized")
|
||||
return self.__taskID
|
||||
|
||||
def get_algorithm_name(self):
|
||||
"""
|
||||
Get the algorithm name
|
||||
:return:
|
||||
"""
|
||||
if not self.__init_flag:
|
||||
raise Exception("AlgorithmName is not initialized")
|
||||
return self.__algorithm_name
|
||||
|
||||
def get_envs(self):
|
||||
"""
|
||||
Get the runtime environment requirements
|
||||
:return: the environment requirements, or None on error
|
||||
"""
|
||||
if not self.__init_flag:
|
||||
raise Exception("XML is not initialized")
|
||||
return self.__envs
|
||||
|
||||
def get_input_paras(self):
|
||||
"""
|
||||
Get the input parameters
|
||||
:return: the input parameters, or None on error
|
||||
"""
|
||||
if not self.__init_flag:
|
||||
raise Exception("XML is not initialized")
|
||||
return self.__input_paras
|
||||
|
||||
def get_output_paras(self):
|
||||
"""
|
||||
Get the output parameters
|
||||
:return: the output parameters, or None on error
|
||||
"""
|
||||
if not self.__init_flag:
|
||||
raise Exception("XML is not initialized")
|
||||
return self.__output_paras
|
||||
|
||||
def __check_workspace_path(self):
|
||||
"""
|
||||
Check the workspace path
|
||||
:return: the workspace path, or None on error
|
||||
"""
|
||||
workspace_note = self.__root.find("WorkSpace")
|
||||
workspace_path = str(workspace_note.text).replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
if workspace_path is None:
|
||||
raise Exception("'workspace_path' is None")
|
||||
if not os.path.isdir(workspace_path):
|
||||
raise Exception("'workspace_path' is not save:%s",workspace_path)
|
||||
|
||||
if workspace_path[-1] != '\\':
|
||||
workspace_path += '\\'  # append a path separator (the original appended a literal quote pair)
|
||||
|
||||
return workspace_path
|
||||
|
||||
def __check_environment(self):
|
||||
"""
|
||||
Check the runtime environment requirements in the XML
|
||||
:return: dict of environment requirements, or None on error
|
||||
"""
|
||||
env_note = self.__alg_compt.find("Environment")
|
||||
|
||||
is_cluster = int(env_note.find("IsCluster").text.replace("\n", "").replace(' ', ''))
|
||||
is_legal = is_cluster in [0, 1]
|
||||
if not is_legal:
|
||||
raise Exception("IsCluster is not 0 or 1")
|
||||
|
||||
cluster_num = int(env_note.find("ClusterNum").text)
|
||||
is_legal = cluster_num in [0, 1, 2, 3, 4, 5, 6, 7]
|
||||
if not is_legal:
|
||||
raise Exception("cluster_num is beyond [0,1,2,3,4,5,6,7]")
|
||||
|
||||
operating_system = env_note.find("OperatingSystem").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
# is_legal = operating_system in ["Windows10", "Windows7", "WindowsXP"]
|
||||
# if not is_legal:
|
||||
# raise Exception("OperatingSystem is beyond [Windows10, Windows7, WindowsXP]")
|
||||
|
||||
cpu = env_note.find("CPU").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
is_legal = cpu in ["单核", "双核", "3核", "4核", "6核", "8核"]
|
||||
if not is_legal:
|
||||
raise Exception("OperatingSystem is beyond [单核, 双核, 3核, 4核, 6核, 8核]")
|
||||
|
||||
memory = env_note.find("Memory").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
is_legal = memory in ["1GB", "2GB", "4GB", "6GB", "8GB", "10GB", "12GB", "16GB"]
|
||||
# if not is_legal:
|
||||
# raise Exception("OperatingSystem is beyond [1GB, 2GB, 4GB, 6GB, 8GB, 10GB, 12GB, 16GB]")
|
||||
|
||||
storage = env_note.find("Storage").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
is_legal = int(storage[:-2]) > 0
|
||||
if not is_legal:
|
||||
raise Exception("Storage < 0GB")
|
||||
|
||||
network_card = env_note.find("NetworkCard").text
|
||||
# is_legal = network_card in ["无需求"]
|
||||
# if not is_legal:
|
||||
# # 输出异常
|
||||
# return
|
||||
|
||||
band_width = env_note.find("Bandwidth").text
|
||||
# is_legal = band_width in ["无需求"]
|
||||
# if not is_legal:
|
||||
# # 输出异常
|
||||
# return
|
||||
|
||||
gpu = env_note.find("GPU").text
|
||||
# is_legal = GPU in ["无需求"]
|
||||
# if not is_legal:
|
||||
# # 输出异常
|
||||
# return
|
||||
envs = {"is_Cluster": is_cluster, "cluster_num": cluster_num, "operating_system": operating_system,
|
||||
"CPU": cpu, "memory": memory}
|
||||
envs.update({"Storage": storage, "network_card": network_card, "band_width": band_width, "GPU": gpu})
|
||||
return envs
|
||||
|
||||
def __check_input_para(self):
|
||||
"""
|
||||
Check the input parameters in the XML
|
||||
:return: dict of input parameters, or None on error
|
||||
"""
|
||||
input_paras_note = self.__alg_compt.find("Inputs")
|
||||
paras_num = int(input_paras_note.attrib.get("ParameterNum"))
|
||||
para_list = input_paras_note.findall("Parameter")
|
||||
|
||||
if paras_num != len(para_list):
|
||||
msg ="'ParameterNum':"+ str(paras_num) + " != number of 'Parameter':" + str(len(para_list))
|
||||
logger.warning(msg)
|
||||
|
||||
input_paras = {}
|
||||
for para in para_list:
|
||||
para_name = para.find("ParaName").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
para_chs_name = para.find("ParaChsName").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
para_type = para.find("ParaType").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
data_type = para.find("DataType").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
para_value = para.find("ParaValue").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
input_para = {"ParaName": para_name, "ParaChsName": para_chs_name, "ParaType": para_type,
|
||||
"DataType": data_type, "ParaValue": para_value}
|
||||
#print(para_name)
|
||||
if para_type == "Value":
|
||||
# max_value = para.find("MaxValue").text
|
||||
# min_value = para.find("MinValue").text
|
||||
# option_value = para.find("OptionValue").text.replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
# input_para.update({"MaxValue": max_value, "MinValue": min_value, "OptionValue": option_value})
|
||||
# input_para.update({"OptionValue": option_value}) todo
|
||||
pass
|
||||
if para_name is None or para_type is None or para_value is None:
|
||||
msg = 'there is None among para_name:' + str(para_name) + ', para_type:' + str(para_type) + ' or para_value:' + str(para_value) + '!'
|
||||
raise Exception(msg)
|
||||
|
||||
input_paras.update({para_name: input_para})
|
||||
return input_paras
|
||||
|
||||
def __check_output_para(self):
|
||||
"""
|
||||
Check the output parameters in the XML
|
||||
:return: dict of output parameters, or None on error
|
||||
"""
|
||||
output_paras_note = self.__alg_compt.find("Outputs")
|
||||
paras_num = int(output_paras_note.attrib.get("ParameterNum"))
|
||||
para_list = output_paras_note.findall("Parameter")
|
||||
|
||||
if paras_num != len(para_list):
|
||||
raise Exception("'ParameterNum' != number of 'Parameter'")
|
||||
output_paras = {}
|
||||
return output_paras
|
||||
|
||||
def write_out_para(self, para_name, para_value):
|
||||
"""
|
||||
写入输出参数
|
||||
"""
|
||||
output_paras_note = self.__alg_compt.find("Outputs")
|
||||
para_list = output_paras_note.findall("Parameter")
|
||||
flag = False
|
||||
for para in para_list:
|
||||
if para.find("ParaName").text == para_name:
|
||||
para.find("ParaValue").text = para_value
|
||||
flag = True
|
||||
if flag == False:
|
||||
raise Exception('Cannot find Output Parameter:'+para_name+'!')
|
||||
self.__tree.write(self.in_path, encoding="utf-8", xml_declaration=True)
|
||||
|
||||
def __check_task_id(self):
|
||||
"""
|
||||
检查任务ID
|
||||
:return: taskID, None-异常
|
||||
"""
|
||||
task_id_note = self.__root.find("TaskID")
|
||||
task_id = str(task_id_note.text).replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
if task_id is None:
|
||||
raise Exception("'TaskID' is None")
|
||||
return task_id
|
||||
|
||||
def __check_algorithm_name(self):
|
||||
|
||||
algorithm_name_note = self.__alg_compt.find("AlgorithmName")
|
||||
algorithm_name = str(algorithm_name_note.text).replace("\n", "").replace(' ', '') #去除空格和回车
|
||||
if algorithm_name is None:
|
||||
raise Exception("'AlgorithmName' is None")
|
||||
return algorithm_name
|
||||
|
||||
|
||||
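A minimal usage sketch for the handle above; the XML path and parameter name are hypothetical, and the constructor signature is assumed from how CheckSource drives the class below:

# Hypothetical usage: update one output value and persist it back to the task XML.
handle = ManageAlgXML(r"D:\micro\LWork\Ortho\Ortho.xml")  # path is illustrative
handle.init_xml()
handle.write_out_para("OutProductPath", r"D:\micro\LWork\Ortho\Output\product.tar.gz")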
class CheckSource:
    """
    Check that the resources declared in the config file are complete and valid.
    """
    def __init__(self, alg_xml_handle):
        self.__alg_xml_handle = alg_xml_handle
        self.imageHandler = ImageHandler()
        self.__ParameterDic = {}

    def check_alg_xml(self):
        """
        Check the algorithm config file.
        """
        if self.__alg_xml_handle.init_xml():
            logger.info('init algXML succeed')
            return True
        else:
            raise Exception('init algXML failed')

    def check_run_env(self):
        """
        :return: True on success; raises on error
        """
        envs = self.__alg_xml_handle.get_envs()
        # check the operating system
        local_plat = platform.platform()
        local_plat_list = local_plat.split("-")
        flag = envs['operating_system'] == local_plat_list[0] + local_plat_list[1]
        if flag is False:
            msg = 'operating_system:' + local_plat_list[0] + local_plat_list[1] + ' is not ' + envs['operating_system']
            # raise Exception(msg)

        # check system memory (RAM)
        mem = psutil.virtual_memory()
        mem_total = int(round(mem.total / 1024 / 1024 / 1024, 0))
        mem_free = round(mem.free / 1024 / 1024 / 1024, 0)
        env_memory = envs['memory']
        env_memory = int(env_memory[:-2])
        if env_memory > mem_total:
            msg = 'memory_total ' + str(mem_total) + 'GB less than ' + str(env_memory) + 'GB'
            # raise Exception(msg)

        if env_memory >= mem_free:
            msg = 'mem_free ' + str(mem_free) + 'GB less than ' + str(env_memory) + 'GB'
            logger.warning(msg)

        # check the number of CPU cores
        env_cpu = envs['CPU']
        if env_cpu == "单核":
            env_cpu_core_num = 1
        elif env_cpu == "双核":
            env_cpu_core_num = 2
        elif env_cpu == "三核":
            env_cpu_core_num = 3
        else:
            env_cpu_core_num = int(env_cpu[:-1])

        local_cpu_core_num = int(multiprocessing.cpu_count() / 2)
        if env_cpu_core_num > local_cpu_core_num:
            msg = 'CPU_core_num ' + str(local_cpu_core_num) + ' core less than ' + str(env_cpu_core_num) + ' core'
            # raise Exception(msg)

        # check free disk space
        env_storage = envs['Storage']
        env_storage = int(env_storage[:-2])
        workspace_path = self.__alg_xml_handle.get_workspace_path()
        if not os.path.isdir(workspace_path):
            raise Exception('workspace_path: %s does not exist!' % workspace_path)

        local_storage = self.__get_free_space_mb(workspace_path)
        if env_storage > local_storage:
            msg = 'workspace storage ' + str(local_storage) + 'GB less than ' + envs['Storage'] + 'GB'
            # raise Exception(msg)

        return True

    @staticmethod
    def __get_free_space_mb(folder):
        """
        :param folder: path to check, e.g. 'C:\\'
        :return: folder/drive free space (GB, despite the method name)
        """
        if platform.system() == 'Windows':
            free_bytes = ctypes.c_ulonglong(0)
            ctypes.windll.kernel32.GetDiskFreeSpaceExW(ctypes.c_wchar_p(folder), None, None, ctypes.pointer(free_bytes))
            return free_bytes.value / 1024 / 1024 / 1024
        else:
            st = os.statvfs(folder)
            return st.f_bavail * st.f_frsize / 1024 / 1024 / 1024  # was /1024/1024 (MB); both branches should return GB

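The platform split above predates the portable stdlib helper; a one-line sketch of the equivalent call (path illustrative):

import shutil
free_gb = shutil.disk_usage(r"D:\micro\LWork").free / 1024 / 1024 / 1024  # works on Windows and POSIX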
    def check_input_paras(self, input_para_names):
        """
        :param input_para_names: list of parameter names to check, [name1, name2, ...]
        :return: (True, parameter dict); raises on error
        """
        workspace_path = self.__alg_xml_handle.get_workspace_path()
        input_paras = self.__alg_xml_handle.get_input_paras()
        for name in input_para_names:
            para = input_paras.get(name)  # .get() so a missing name reaches the None check below
            if para is None:
                msg = "check para:" + name + " is failed!" + "para is None!"
                raise Exception(msg)

            if para['ParaType'] == 'File':
                if para['DataType'] == 'tif':
                    if para['ParaValue'] != 'empty' and para['ParaValue'] != 'Empty' and para['ParaValue'] != '':
                        para_value_list = para['ParaValue'].split(";")
                        for para_value in para_value_list:
                            para_path = para_value
                            if self.__check_tif(para_path) is False:
                                msg = "check para:" + name + " is failed!" + "Path:" + para_path
                                raise Exception(msg)

                if para['DataType'] == 'xml':
                    para_path = para['ParaValue']
                    if not os.path.exists(para_path):
                        raise Exception('para_file: %s does not exist!' % para_path)

                if para['DataType'] == 'File':
                    para_path = para['ParaValue']
                    if os.path.isdir(para_path) is False:
                        msg = "check para:" + name + " is failed!" + "FilePath:" + para_path
                        raise Exception(msg)
                if para["DataType"] == "ymal":  # sic, matches the 'ymal' DataType string used in the configs
                    para_path = para['ParaValue']
                    if os.path.isfile(para_path) is False:
                        msg = "check para: " + name + " is failed! " + " FilePath: " + para_path
                        raise Exception(msg)

            elif para['ParaType'] == 'Value':
                if para['DataType'] == 'float' or para['DataType'] == 'int' or para['DataType'] == 'double':
                    if para['ParaValue'] is None:
                        msg = "check para:" + name + " is failed!" + "'ParaValue' is None"
                        raise Exception(msg)
                    if self.__is_number(para['ParaValue']) is False:
                        raise Exception("para:" + name + " is not number!")
                    # if (para['MaxValue'] is not None) and (self.__is_number(para['MaxValue']) is True):
                    #     value = para['ParaValue']
                    #     max = para['MaxValue']
                    #     if float(value) > float(max):
                    #         msg = "para:" + name + " > max, para:" + value + "max:" + max
                    #         raise Exception(msg)
                    # if (para['MinValue'] is not None) and (self.__is_number(para['MinValue']) is True):
                    #     value = para['ParaValue']
                    #     min = para['MinValue']
                    #     if float(value) < float(min):
                    #         msg = "para:" + name + " < min, para:" + value + "min:" + min
                    #         raise Exception(msg)

            self.__ParameterDic[name] = para['ParaValue']
        __workspace_path = workspace_path
        __input_paras = input_paras
        return True, self.__ParameterDic

    def check_output_paras(self, output_para_names):
        """
        :param output_para_names: list of parameter names to check, [name1, name2, ...]
        :return: True or False
        """
        workspace_path = self.__alg_xml_handle.get_workspace_path()
        output_paras = self.__alg_xml_handle.get_output_paras()

        for name in output_para_names:
            para = output_paras.get(name)
            # print(para)
            if para is None:
                msg = "check para:" + name + " is failed!" + "para is None!"
                raise Exception(msg)

            if para['ParaType'] == 'File':
                if para['DataType'] == 'tif':
                    para_path = workspace_path + para['ParaValue']
                    para_dir = os.path.split(para_path)
                    flag_isdir = os.path.isdir(para_dir[0])
                    flag_istif = (para_dir[1].split(".", 1)[1] == "tif")
                    if not (flag_isdir and flag_istif):  # was 'flag_isdir and flag_istif is False', which binds wrongly
                        msg = "check para:" + name + " is failed!" + para_path + "is invalid!"
                        raise Exception(msg)

                if para['DataType'] == 'File':
                    para_path = workspace_path + para['ParaValue']
                    if os.path.isdir(para_path) is False:
                        os.makedirs(para_path)
                        if os.path.isdir(para_path) is False:
                            msg = "check para:" + name + " is failed!" + para_path + "is invalid!"
                            raise Exception(msg)
        return True

    @staticmethod
    def __is_number(str_num):
        """
        :param str_num: check whether the string parses as a float/double
        :return: True or False
        """
        if str_num[0] == '-':
            str_num = str_num[1:]
        pattern = re.compile(r'(.*)\.(.*)\.(.*)')
        if pattern.match(str_num):
            return False
        return str_num.replace(".", "").isdigit()

    def __check_tif(self, filename):
        """
        :param filename: file path
        :return: True or False
        """
        if self.imageHandler.get_dataset(filename) is None:
            msg = "read tif error! filename: " + filename
            raise Exception(msg)
        return True

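A sketch of the call sequence the SAR mains in this repo follow; the parameter-name list is illustrative:

check = CheckSource(alg_xml_handle)  # alg_xml_handle: a ManageAlgXML instance
check.check_alg_xml()
check.check_run_env()
flag, para_dic = check.check_input_paras(["AHV", "DEM", "META"])  # names are illustrative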
class InitPara:
    def __init__(self, debug=False):
        self._debug = debug

    @staticmethod
    def init_processing_paras(input_paras):
        """
        :param input_paras: dict of input-parameter dicts, one per configured input product
        """
        processing_paras = {}
        for name in input_paras:
            para = input_paras[name]
            if para is None:
                logger.error(name + " is None!")
                return False

            if para['ParaType'] == 'File':
                if para['DataType'] == 'tif' or para['DataType'] == 'csv':
                    para_value_list = para['ParaValue'].split(";")
                    if len(para_value_list) == 1:
                        para_path = para['ParaValue']
                        if para_path != 'empty' and para_path != '':
                            processing_paras.update({name: para_path})
                    else:
                        for n, para_value in zip(range(len(para_value_list)), para_value_list):
                            processing_paras.update({name + str(n): para_value})
                elif para['DataType'] == 'tar.gz':
                    paths = para['ParaValue'].split(';')
                    for n, path in zip(range(len(paths)), paths):
                        processing_paras.update({'sar_path' + str(n): path})
                else:
                    para_path = para['ParaValue']
                    processing_paras.update({name: para_path})

            elif para['ParaType'] == 'Value':
                if para['DataType'] == 'float':
                    value = float(para['ParaValue'])
                elif para['DataType'] == 'int':
                    value = int(para['ParaValue'])
                else:  # default: string
                    value = para['ParaValue']
                processing_paras.update({name: value})
            elif para['ParaType'] == 'String':
                value = para['ParaValue']
                if value == 'empty':
                    continue
                else:
                    processing_paras.update({name: value})
        return processing_paras
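Sketch: feed the dict produced by the handle's get_input_paras() straight in; a 'tar.gz' input listing two archives comes out under the keys 'sar_path0' and 'sar_path1':

input_paras = alg_xml_handle.get_input_paras()
processing_paras = InitPara.init_processing_paras(input_paras)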
    # helpers below collect files from the unpacked package folder

    @staticmethod
    def get_tif_paths(file_dir, name):
        in_tif_paths = []
        if os.path.exists(file_dir + name + '\\'):
            in_tif_paths = list(glob.glob(os.path.join(file_dir + name + '\\', '*.tif')))
            in_tif_paths1 = list(glob.glob(os.path.join(file_dir + name + '\\', '*.tiff')))
            if in_tif_paths1 != []:
                in_tif_paths = in_tif_paths + in_tif_paths1
        else:
            in_tif_paths = list(glob.glob(os.path.join(file_dir, '*.tif')))
            in_tif_paths1 = list(glob.glob(os.path.join(file_dir, '*.tiff')))
            if in_tif_paths1 != []:  # was 'in_tif_paths != []', which dropped .tiff files when no .tif existed
                in_tif_paths = in_tif_paths + in_tif_paths1
        return in_tif_paths

    @staticmethod
    def get_tif_paths_new(file_dir, name):
        in_tif_paths = []
        if os.path.exists(file_dir + name + '\\'):
            in_tif_paths = list(glob.glob(os.path.join(file_dir + name + '\\', '*.tif')))
            in_tif_paths1 = list(glob.glob(os.path.join(file_dir + name + '\\', '*.tiff')))
            if in_tif_paths1 != []:
                in_tif_paths = in_tif_paths + in_tif_paths1
        else:
            in_tif_paths = list(glob.glob(os.path.join(file_dir, '*.tif')))
            in_tif_paths1 = list(glob.glob(os.path.join(file_dir, '*.tiff')))
            if len(in_tif_paths) == 0:  # the 'new' variant only falls back to .tiff when no .tif was found
                in_tif_paths = in_tif_paths + in_tif_paths1
        return in_tif_paths

    @staticmethod
    def get_polarization_mode(in_tif_paths):
        pol_dic = {}
        pola_list = [0, 0, 0, 0]
        for in_tif_path in in_tif_paths:
            # identify the polarization from the file name
            if '_HH_' in os.path.basename(in_tif_path):
                pol_dic.update({'HH': in_tif_path})
                pola_list[0] = 1
            elif '_HV_' in os.path.basename(in_tif_path):
                pol_dic.update({'HV': in_tif_path})
                pola_list[1] = 1
            elif '_VH_' in os.path.basename(in_tif_path):
                pol_dic.update({'VH': in_tif_path})
                pola_list[2] = 1
            elif '_VV_' in os.path.basename(in_tif_path):
                pol_dic.update({'VV': in_tif_path})
                pola_list[3] = 1
            elif 'LocalIncidenceAngle' in os.path.basename(in_tif_path) or 'ncidenceAngle' in os.path.basename(in_tif_path):
                pol_dic.update({'LocalIncidenceAngle': in_tif_path})
            elif 'inc_angle' in os.path.basename(in_tif_path):
                pol_dic.update({'inc_angle': in_tif_path})
            elif 'inci_Angle-ortho' in os.path.basename(in_tif_path):
                pol_dic.update({'inci_Angle-ortho': in_tif_path})
            elif 'LocalincidentAngle-ortho' in os.path.basename(in_tif_path):
                pol_dic.update({'LocalIncidentAngle-ortho': in_tif_path})
            elif 'ori_sim' in os.path.basename(in_tif_path):
                pol_dic.update({'ori_sim': in_tif_path})
            elif 'sim_ori' in os.path.basename(in_tif_path):
                pol_dic.update({'sim_ori': in_tif_path})
        pol_dic.update({'pola': pola_list})
        return pol_dic
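A sketch with made-up paths; only the marker substrings matter for the matching above:

pol_dic = InitPara.get_polarization_mode([
    r"D:\pkg\GF3_SAY_QPSI_011444_HH_L10003515422.tif",  # paths are illustrative
    r"D:\pkg\GF3_SAY_QPSI_011444_HV_L10003515422.tif",
])
# -> {'HH': '...', 'HV': '...', 'pola': [1, 1, 0, 0]}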
    @staticmethod
    def get_meta_paths(file_dir, name):
        meta_xml_paths = []
        if os.path.exists(file_dir + name + '\\'):
            meta_xml_paths = list(glob.glob(os.path.join(file_dir + name, '*.meta.xml')))
        else:
            meta_xml_paths = list(glob.glob(os.path.join(file_dir, '*.meta.xml')))
        if meta_xml_paths is None or meta_xml_paths == []:
            raise Exception('there is no .meta.xml in path: ' + file_dir)
        return meta_xml_paths

    @staticmethod
    def get_incidence_xml_paths(file_dir, name):
        meta_xml_paths = []
        if os.path.exists(file_dir + name + '\\'):
            meta_xml_paths = list(glob.glob(os.path.join(file_dir + name, '*.incidence.xml')))
        else:
            meta_xml_paths = list(glob.glob(os.path.join(file_dir, '*.incidence.xml')))
        if meta_xml_paths is None or meta_xml_paths == []:
            raise Exception('there is no .incidence.xml in path: ' + file_dir)
        return meta_xml_paths

    @staticmethod
    def get_meta_dic(meta_xml_paths, name):
        para_dic = {}
        for mete_path in meta_xml_paths:
            if name in mete_path:
                para_dic.update({'META': mete_path})
        if para_dic == {}:  # was 'is {}', which never triggers
            raise Exception('the name of .meta.xml is error!')
        return para_dic

    @staticmethod
    def get_incidence_dic(meta_xml_paths, name):
        para_dic = {}
        for mete_path in meta_xml_paths:
            if name in mete_path:
                para_dic.update({'Incidence': mete_path})
        if para_dic == {}:
            raise Exception('the name of .incidence.xml is error!')
        return para_dic

    @staticmethod
    def get_meta_dic_new(meta_xml_paths, name):
        para_dic = {}
        for mete_path in meta_xml_paths:
            if name in os.path.basename(mete_path):
                para_dic.update({'META': mete_path})
            else:
                para_dic.update({'Origin_META': mete_path})
        if para_dic == {}:
            raise Exception('the name of .meta.xml is error!')
        return para_dic

    @staticmethod
    def get_meta_dic_VP(meta_xml_paths, name):
        para_dic = {}
        for mete_path in meta_xml_paths:
            if name in os.path.basename(mete_path):
                para_dic.update({name + '_META': mete_path})
            else:
                para_dic.update({name + '_Origin_META': mete_path})
        if para_dic == {}:
            raise Exception('the name of .meta.xml is error!')
        return para_dic

    def get_mult_tar_gz_inf(self, tar_gz_path, workspace_preprocessing_path):
        para_dic = {}
        name = os.path.split(tar_gz_path)[1]
        if name.endswith('.tar.gz'):
            name = name[:-len('.tar.gz')]  # rstrip('.tar.gz') strips characters, not the suffix
        para_dic.update({'name': name})

        file_dir = os.path.join(workspace_preprocessing_path, name + '\\')
        if not self._debug:
            fileHandle().de_targz(tar_gz_path, file_dir)
        # meta-file dict
        para_dic.update(InitPara.get_meta_dic_VP(InitPara.get_meta_paths(file_dir, name), name))
        # tif-path dict
        pol_dic = InitPara.get_polarization_mode(InitPara.get_tif_paths(file_dir, name))
        parameter_path = os.path.join(file_dir, "orth_para.txt")
        para_dic.update({name + "paraMeter": parameter_path})
        for key, in_tif_path in pol_dic.items():
            para_dic.update({name + '_' + key: in_tif_path})
        return para_dic

    def get_mult_tar_gz_infs(self, processing_paras, workspace_preprocessing_path):
        tif_names_list = []
        tar_inf_dic = {}
        for key, value in processing_paras.items():
            if 'sar_path' in key:
                para_dic = self.get_mult_tar_gz_inf(value, workspace_preprocessing_path)
                tif_names_list.append(para_dic['name'])
                para_dic.pop('name')
                tar_inf_dic.update(para_dic)
        tar_inf_dic.update({'name_list': tif_names_list})

        return tar_inf_dic
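Sketch of the multi-scene entry point, with processing_paras as built by init_processing_paras above:

infs = InitPara(debug=False).get_mult_tar_gz_infs(processing_paras, workspace_preprocessing_path)
# infs['name_list'] holds the unpacked scene names; per-scene keys look like
# '<name>_HH', '<name>_META' and '<name>paraMeter'.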
@@ -1,135 +0,0 @@
from xml.etree.ElementTree import ElementTree


class DictXml:
    def __init__(self, xml_path):
        self.xml_path = xml_path
        self.__tree = ElementTree()
        self.__root = None
        self.init_xml()

    def init_xml(self):
        self.__root = self.__tree.parse(self.xml_path)
        if self.__root is None:
            raise Exception("get root failed")

    def get_extend(self):
        productInfo = self.__root.find("imageinfo")
        if productInfo is None:
            raise Exception("get imageInfo failed")

        corner = productInfo.find("corner")
        if corner is None:
            raise Exception("get corner failed")

        topLeft = corner.find("topLeft")
        if topLeft is None:
            raise Exception("get topLeft failed")

        topRight = corner.find("topRight")
        if topRight is None:
            raise Exception("get topRight failed")

        bottomLeft = corner.find("bottomLeft")
        if bottomLeft is None:
            raise Exception("get bottomLeft failed")

        bottomRight = corner.find("bottomRight")
        if bottomRight is None:
            raise Exception("get bottomRight failed")

        point_upleft = [float(topLeft.find("longitude").text), float(topLeft.find("latitude").text)]
        point_upright = [float(topRight.find("longitude").text), float(topRight.find("latitude").text)]
        point_downleft = [float(bottomLeft.find("longitude").text), float(bottomLeft.find("latitude").text)]
        point_downright = [float(bottomRight.find("longitude").text), float(bottomRight.find("latitude").text)]
        scopes = [point_upleft, point_upright, point_downleft, point_downright]

        point_upleft_buf = [float(topLeft.find("longitude").text) - 0.5, float(topLeft.find("latitude").text) + 0.5]
        point_upright_buf = [float(topRight.find("longitude").text) + 0.5, float(topRight.find("latitude").text) + 0.5]
        point_downleft_buf = [float(bottomLeft.find("longitude").text) - 0.5,
                              float(bottomLeft.find("latitude").text) - 0.5]
        point_downright_buf = [float(bottomRight.find("longitude").text) + 0.5,
                               float(bottomRight.find("latitude").text) - 0.5]
        scopes_buf = [point_upleft_buf, point_upright_buf, point_downleft_buf, point_downright_buf]
        return scopes, scopes_buf  # was 'return scopes', which broke the two-value unpacking in the demo below


class xml_extend:
    def __init__(self, xml_path):
        self.xml_path = xml_path
        self.__tree = ElementTree()
        self.__root = None
        self.init_xml()

    def init_xml(self):
        self.__root = self.__tree.parse(self.xml_path)
        if self.__root is None:
            raise Exception("get root failed")

    def get_extend(self):
        ProductBasicInfo = self.__root.find("ProductBasicInfo")
        if ProductBasicInfo is None:
            raise Exception("get ProductBasicInfo failed")

        SpatialCoverageInformation = ProductBasicInfo.find("SpatialCoverageInformation")
        if SpatialCoverageInformation is None:
            raise Exception("get SpatialCoverageInformation failed")

        TopLeftLongitude = SpatialCoverageInformation.find("TopLeftLongitude")
        if TopLeftLongitude is None:
            raise Exception("get TopLeftLongitude failed")

        TopLeftLatitude = SpatialCoverageInformation.find("TopLeftLatitude")
        if TopLeftLatitude is None:
            raise Exception("get TopLeftLatitude failed")

        TopRightLongitude = SpatialCoverageInformation.find("TopRightLongitude")
        if TopRightLongitude is None:
            raise Exception("get TopRightLongitude failed")

        TopRightLatitude = SpatialCoverageInformation.find("TopRightLatitude")
        if TopRightLatitude is None:
            raise Exception("get TopRightLatitude failed")

        BottomRightLongitude = SpatialCoverageInformation.find("BottomRightLongitude")
        if BottomRightLongitude is None:
            raise Exception("get BottomRightLongitude failed")

        BottomRightLatitude = SpatialCoverageInformation.find("BottomRightLatitude")
        if BottomRightLatitude is None:
            raise Exception("get BottomRightLatitude failed")

        BottomLeftLongitude = SpatialCoverageInformation.find("BottomLeftLongitude")
        if BottomLeftLongitude is None:
            raise Exception("get BottomLeftLongitude failed")

        BottomLeftLatitude = SpatialCoverageInformation.find("BottomLeftLatitude")
        if BottomLeftLatitude is None:
            raise Exception("get BottomLeftLatitude failed")

        point_upleft = [float(TopLeftLongitude.text), float(TopLeftLatitude.text)]
        point_upright = [float(TopRightLongitude.text), float(TopRightLatitude.text)]
        point_downleft = [float(BottomLeftLongitude.text), float(BottomLeftLatitude.text)]
        point_downright = [float(BottomRightLongitude.text), float(BottomRightLatitude.text)]
        scopes = [point_upleft, point_upright, point_downleft, point_downright]

        point_upleft_buf = [float(TopLeftLongitude.text) - 0.5, float(TopLeftLatitude.text) + 0.5]
        point_upright_buf = [float(TopRightLongitude.text) + 0.5, float(TopRightLatitude.text) + 0.5]
        point_downleft_buf = [float(BottomLeftLongitude.text) - 0.5, float(BottomLeftLatitude.text) - 0.5]
        point_downright_buf = [float(BottomRightLongitude.text) + 0.5, float(BottomRightLatitude.text) - 0.5]
        scopes_buf = [point_upleft_buf, point_upright_buf, point_downleft_buf, point_downright_buf]  # computed but not returned here
        return scopes


if __name__ == '__main__':
    xml_path = r'E:\MicroWorkspace\GF3A_nanjing\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422.meta.xml'
    scopes, scopes_buf = DictXml(xml_path).get_extend()
    print(scopes)
    print(scopes_buf)
    # path = r'D:\BaiduNetdiskDownload\GZ\lon.rdr'
    # path2 = r'D:\BaiduNetdiskDownload\GZ\lat.rdr'
    # path3 = r'D:\BaiduNetdiskDownload\GZ\lon_lat.tif'
    # s = ImageHandler().band_merge(path, path2, path3)
    # print(s)
    # pass
@@ -1,319 +0,0 @@
"""
@Project :microproduct
@File :OnePlantHeight.PY
@Function :main entry

@Author :LMM
@Date :2021/10/19 14:39
@Version :1.0.0
"""
from xml.dom import minidom
from xml.etree.ElementTree import ElementTree, Element
import xml.dom.minidom
from lxml import etree
import codecs
import xml.etree.cElementTree as ET


class CreateMetafile:
    """
    Generate a product meta file from a template case.
    """
    def __init__(self, input_image_path, input_para_file, an_li_path, path):
        """
        input_image_path: scene header file
        input_para_file: config file
        an_li_path: template case path
        path: output path
        """
        self.input_image_path = input_image_path
        self.input_para_file = input_para_file
        self.an_li_path = an_li_path
        self.path = path
        pass

    def create_xml(self):
        """
        Read the meta file, keeping only the part from the header down to the
        'sensor' node, and return the position of the 'sensor' node.
        """
        tree = ElementTree()
        tree.parse(self.input_image_path)  # scene header file
        root = tree.getroot()
        # 1. keep only the part from the header down to the 'sensor' node
        element_trees = list(root)
        count = 0
        count_01 = 1
        for element in element_trees:
            count = count + 1
            if element.tag == "sensor":
                element.tail = "\n\n\t"
                count_01 = count - 1
        for i in range(0, len(element_trees)):
            if i > count_01:
                root.remove(element_trees[i])
        # 2. keep only "satellite", "orbitType", "attiType", "Direction", "ReceiveTime", "sensor"
        element_trees2 = list(root)
        for i in element_trees2:
            if i.tag not in ["satellite", "orbitType", "attiType", "Direction", "ReceiveTime", "sensor"]:
                root.remove(i)
        # 3. locate the "sensor" node and return its position
        count2 = 0
        count2_01 = 1
        element_trees3 = list(root)
        for element in element_trees3:
            count2 = count2 + 1
            if element.tag == "sensor":
                element.tail = "\n\n\t"
                count2_01 = count2 - 1
        tree.write(self.path, encoding="utf-8", xml_declaration=True)
        return count2_01

    @staticmethod
    def create_node(tag, property_map, content):
        """
        fun: build a new node
        para: tag: node tag
        para: property_map: dict of attributes and values
        para: content: text inside the closed tag
        para: return: the new node
        """
        element = Element(tag, property_map)
        element.text = content
        element.tail = "\n\t"
        return element

    def add_standard_xml(self, num):
        """
        Write the template directly into the meta file.
        """
        tree = ElementTree()
        tree.parse(self.path)
        root = tree.getroot()

        tree2 = ElementTree()
        tree2.parse(self.an_li_path)  # template case file
        root2 = tree2.getroot()

        productinfo = root2.find("productinfo")
        root.insert(num + 1, productinfo)
        processinfo = root2.find("processinfo")
        root.insert(num + 2, processinfo)
        tree.write(self.path, encoding="utf-8", xml_declaration=True)

    def add_img_xml(self, num, SrcImageName):
        """Add the image information."""
        tree = ElementTree()
        tree.parse(self.path)
        root = tree.getroot()

        a = self.create_node("SrcImageName", {"desc": "原始影像名称"}, SrcImageName)
        root.insert(num + 1, a)
        # root.append(a)
        b = self.create_node("AlgCompt", {"desc": "算法信息"}, "\n\t\t")
        b.tail = "\n\n\t"
        # root.append(b)
        root.insert(num + 2, b)
        tree.write(self.path, encoding="utf-8", xml_declaration=True)

    def add_info_xml(self):
        """
        Copy part of the config file into the meta file ("AlgorithmName", "ChsName",
        "AlgorithmDesc", "Version", "AlgorithmClass", "AlgorithmLevel", "AlgoirthmID", "Author").
        """
        tree = ElementTree()
        tree.parse(self.input_para_file)  # config file
        root = tree.getroot()

        tree2 = ElementTree()
        tree2.parse(self.path)
        root2 = tree2.getroot()
        AlgCompt = root2.find("AlgCompt")

        a = root.find("AlgCompt")

        element_trees = list(a)
        for element in element_trees:
            if element.tag in ["AlgorithmName", "ChsName", "AlgorithmDesc", "Version",
                               "AlgorithmClass", "AlgorithmLevel", "AlgoirthmID", "Author"]:
                element.tail = "\n\t\t"
                AlgCompt.append(element)
            if element.tag == "Author":
                element.tail = "\n\t"

        tree2.write(self.path, encoding="utf-8", xml_declaration=True)

    def add_class_info(self, type_id_name, type_id_parent=None):
        """
        In the meta file:
        1. drop the productinfo-productType information;
        2. add the land-cover class information, producing e.g.:
        <Class1>
            <parent_id>1</parent_id>
            <id>101</id>
            <covernm>耕地</covernm>
        </Class1>
        <Class2>
            <parent_id>5</parent_id>
            <id>502</id>
            <covernm>草地</covernm>
        </Class2>
        """
        tree = ElementTree()
        tree.parse(self.path)
        root = tree.getroot()
        productinfo = root.find("productinfo")
        # element_trees = list(productinfo)
        # for element in element_trees:
        #     if element.tag == "productType":
        #         productinfo.remove(element)  # remove "productType"
        productinfo.find("productConsumeTime").tail = "\n\t\t"  # anchor after productConsumeTime
        b = self.create_node("LandCoverClass", {}, "\n\t\t\t")
        b.tail = "\n\t\t"
        productinfo_count = 0
        for i in list(productinfo):
            productinfo_count = productinfo_count + 1
            if i.tag == "productConsumeTime":
                break
        productinfo.insert(productinfo_count, b)

        # productinfo.insert(num, b)  # insert LandCoverClass
        class_num = 1
        for key, value in type_id_name.items():

            LandCoverClass = productinfo.find("LandCoverClass")
            name = "Class" + str(class_num)
            # name = "Class"
            c = self.create_node(name, {}, "\n\t\t\t\t")
            if class_num != (len(type_id_name.keys())):
                c.tail = "\n\t\t\t"
            else:
                c.tail = "\n\t\t"
            LandCoverClass.append(c)  # insert into LandCoverClass

            # LandCoverClass.find("Class")[num].tail = "\n\t\t\t"
            if type_id_parent is not None:
                parent_id = self.create_node("parent_id", {}, type_id_parent[key])
                parent_id.tail = "\n\t\t\t\t"
                LandCoverClass.find(name).append(parent_id)
            id = self.create_node("id", {}, str(key))
            id.tail = "\n\t\t\t\t"
            LandCoverClass.find(name).append(id)
            covernm = self.create_node("covernm", {}, value)
            covernm.tail = "\n\t\t\t"
            LandCoverClass.find(name).append(covernm)
            class_num = class_num + 1
        tree.write(self.path, encoding="utf-8", xml_declaration=True)

    def rewrite_name(self):
        """
        Rename the class nodes:
        Class1 -> Class
        Class2 -> Class
        """
        tree = ElementTree()
        tree.parse(self.path)
        root = tree.getroot()
        productinfo = root.find("productinfo")
        LandCoverClass = productinfo.find("LandCoverClass")
        element_trees = list(LandCoverClass)
        for element in element_trees:
            element.tag = "Class"
        tree.write(self.path, encoding="utf-8", xml_declaration=True)

    def OrthoInsertNode(self):
        """Ortho only: insert the <l1aInfo> node."""
        tree = ElementTree()
        tree.parse(self.path)
        root = tree.getroot()

        # insert the <l1aInfo> node
        count2 = 0
        count2_01 = 1
        element_trees3 = list(root)
        for element in element_trees3:
            count2 = count2 + 1
            if element.tag == "sensor":
                element.tail = "\n\n\t"
                count2_01 = count2 - 1
        b = self.create_node("l1aInfo", {}, "\n\t\t")
        b.tail = "\n\n\t"
        root.insert(count2_01 + 1, b)

        # locate the <l1aInfo> node
        node_l1aInfo = root.find("l1aInfo")

        img_tree = ElementTree()
        img_tree.parse(self.input_image_path)  # scene header file
        img_root = img_tree.getroot()

        node_imageinfo = img_root.find("imageinfo")
        node_processinfo = img_root.find("processinfo")

        ele_node_imageinfo = list(node_imageinfo)
        ele_node_processinfo = list(node_processinfo)

        for i in ele_node_imageinfo:
            if i.tag == "QualifyValue":
                i.tail = "\n\t\t"
                node_l1aInfo.append(i)

        for j in ele_node_processinfo:
            if j.tag == "CalibrationConst":
                j.tail = "\n\t"  # position of the following node
                node_l1aInfo.append(j)
        tree.write(self.path, encoding="utf-8", xml_declaration=True)

    def process(self, SrcImageName):
        """
        Entry point when no land-cover classes are involved.
        """
        if self.input_image_path is None:
            import xml.etree.cElementTree as ET
            product = ET.Element("product")  # root node, tag = "product"
            product.text = "\n\t"
            tree = ET.ElementTree(product)
            tree.write(self.path)
            count = 0
            count_2 = -1
        else:
            count = self.create_xml()
            count_2 = count
        self.add_standard_xml(count)
        self.add_img_xml(count_2, SrcImageName)
        self.add_info_xml()

    def process2(self, type_id_name, type_id_parent, SrcImageName):
        """
        Entry point when land-cover classes are involved, e.g.
        type_id_name = {"101": "耕地", "502": "草地"}
        type_id_parent = {"101": "1", "502": "5"}
        """
        count = self.create_xml()
        self.add_standard_xml(count)
        self.add_img_xml(count, SrcImageName)
        self.add_info_xml()
        self.add_class_info(type_id_name, type_id_parent)
        self.rewrite_name()

    def process3(self, SrcImageName):
        """
        Entry point for the ortho product.
        """
        if self.input_image_path is None:
            import xml.etree.cElementTree as ET
            product = ET.Element("product")  # root node, tag = "product"
            product.text = "\n\t"
            tree = ET.ElementTree(product)
            tree.write(self.path)
            count = 0
        else:
            count = self.create_xml()
        self.add_standard_xml(count)
        self.add_img_xml(count, SrcImageName)
        self.add_info_xml()
        self.OrthoInsertNode()
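A hedged usage sketch of the ortho entry point; every path and name below is hypothetical:

meta = CreateMetafile(image_header_xml, alg_config_xml, template_case_xml, out_meta_xml)
meta.process3("GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422.tar.gz")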
@@ -1,258 +0,0 @@
import json
from xml.etree.ElementTree import ElementTree, Element
import shutil

import xmltodict

from tool.algorithm.image.ImageHandle import ImageHandler
from tool.algorithm.algtools.PreProcess import PreProcess as pp
from osgeo import gdal
import numpy as np
import datetime
import os
import glob
os.environ['PROJ_LIB'] = r"E:\soft\Anaconda\envs\micro\Lib\site-packages\osgeo\data\proj"

class CreateMetaDict:
    def __init__(self, image_path, origin_xml, pack_path, out_path1, out_path2):
        self.ImageHandler = ImageHandler()
        self.image_path = image_path
        self.origin_xml = origin_xml
        self.pack_path = pack_path
        self.file_size = self.get_file_size()
        self.out_path1 = out_path1
        self.out_path2 = out_path2
        self.timeDict = self.get_productTime()
        pass

    def convertToDateTime(self, string):
        dt = datetime.datetime.strptime(string, "%Y-%m-%dT%H:%M:%S.%f")
        return dt

    def dateStrToStamp(self, dateStr):
        st = datetime.datetime.strptime(dateStr, "%Y-%m-%dT%H:%M:%S.%f").timestamp()
        return st

    def StampToDateTime(self, dateTime):
        dt = datetime.datetime.fromtimestamp(dateTime)
        return dt

    def calu_nature(self):
        """
        Collect the values for the productinfo node into a dict.
        image_path: image path
        image_pair: polarization pairs in the input package, e.g. hh,hv,vh,vv = [1,1,1,1]
        out_path1: output path of the geographic-to-planar transform
        out_path2: output path of the planar-to-geographic transform
        """

        para_dict = {}

        proj = self.ImageHandler.get_projection(self.image_path)  # a projected CRS is converted to geographic first
        keyword = proj.split("[", 2)[0]  # a geographic CRS passes through unchanged
        if keyword == "GEOGCS":
            pass
        elif keyword == "PROJCS":
            pp.trans_projcs2geogcs(self.out_path2, self.image_path)
            image_path = self.out_path2  # note: the calls below still read self.image_path
        elif len(keyword) == 0 or keyword.strip() == "" or keyword.isspace() is True:
            raise Exception('image projection is missing!')

        pp.trans_geogcs2projcs(self.out_path1, self.image_path)  # reproject: geographic to planar
        imageinfo_widthspace = self.ImageHandler.get_geotransform(self.out_path1)[1]  # resolution after projection
        # imageinfo_heightspace = -self.ImageHandler.get_geotransform(out_path1)[5]  # resolution after projection
        # para_dict.update({"imageinfo_widthspace": imageinfo_widthspace})
        # para_dict.update({"imageinfo_heightspace": imageinfo_heightspace})

        para_dict.update({"imageinfo_ProductResolution": imageinfo_widthspace})

        para_dict.update({"imageinfo_ProductFormat": "GEOTIFF"})
        para_dict.update({"imageinfo_CompressionMethod": "None"})
        para_dict.update({"imageinfo_ProductSize": str(self.file_size) + "MB"})  # todo: total product size

        get_scope = self.ImageHandler.get_scope(self.image_path)
        point_upleft, point_upright, point_downleft, point_downright = get_scope[0], get_scope[1], get_scope[2], get_scope[3]
        para_dict.update({"SpatialCoverageInformation_TopLeftLatitude": point_upleft[1]})
        para_dict.update({"SpatialCoverageInformation_TopLeftLongitude": point_upleft[0]})
        para_dict.update({"SpatialCoverageInformation_TopRightLatitude": point_upright[1]})
        para_dict.update({"SpatialCoverageInformation_TopRightLongitude": point_upright[0]})
        para_dict.update({"SpatialCoverageInformation_BottomLeftLatitude": point_downleft[1]})
        para_dict.update({"SpatialCoverageInformation_BottomLeftLongitude": point_downleft[0]})
        para_dict.update({"SpatialCoverageInformation_BottomRightLatitude": point_downright[1]})
        para_dict.update({"SpatialCoverageInformation_BottomRightLongitude": point_downright[0]})
        longitude_max = np.array([point_upleft[0], point_upright[0], point_downleft[0], point_downright[0]]).max()
        longitude_min = np.array([point_upleft[0], point_upright[0], point_downleft[0], point_downright[0]]).min()
        latitude_max = np.array([point_upleft[1], point_upright[1], point_downleft[1], point_downright[1]]).max()
        latitude_min = np.array([point_upleft[1], point_upright[1], point_downleft[1], point_downright[1]]).min()
        imageinfo_center_latitude = (latitude_max + latitude_min) / 2
        imageinfo_center_longitude = (longitude_max + longitude_min) / 2
        para_dict.update({"SpatialCoverageInformation_CenterLatitude": imageinfo_center_latitude})
        para_dict.update({"SpatialCoverageInformation_CenterLongitude": imageinfo_center_longitude})

        para_dict.update({"TimeCoverageInformation_StartTime": self.timeDict.get("startTime")})
        para_dict.update({"TimeCoverageInformation_CenterTime": self.timeDict.get("centerTime")})
        para_dict.update({"TimeCoverageInformation_EndTime": self.timeDict.get("endTime")})

        para_dict.update({"CoordinateReferenceSystemInformation_EarthEllipsoid": "WGS84"})
        para_dict.update({"CoordinateReferenceSystemInformation_MapProjection": "UTM"})
        para_dict.update({"CoordinateReferenceSystemInformation_ZoneNo": "None"})

        para_dict.update({"MetaInfo_Unit": "none"})  # unit
        para_dict.update({"MetaInfo_UnitDes": "无量纲"})  # unit description

        # fill in the ProductProductionInfo section
        data_name = os.path.basename(self.image_path)
        strs = data_name.split("_")
        para_dict.update({"DataSources_DataSource_Satellite": strs[0]})
        para_dict.update({"DataSources_DataSource_Sensor": strs[0]})

        para_dict.update({"ObservationGeometry_SatelliteAzimuth": "None"})
        para_dict.update({"ObservationGeometry_SatelliteRange": "None"})

        para_dict.update({"ProductProductionInfo_BandSelection": "1"})
        para_dict.update({"ProductProductionInfo_DataSourceDescription": "None"})
        para_dict.update({"ProductProductionInfo_DataSourceProcessingDescription": "参考产品介绍PDF"})
        productGentime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        para_dict.update({"ProductProductionInfo_ProductionDate": productGentime})
        para_dict.update({"ProductProductionInfo_AuxiliaryDataDescription": ""})

        # para_dict.update({"ProductPublishInfo_Processor": "德清院"})  # producer
        # para_dict.update({"ProductPublishInfo_DistributionUnit": "none"})  # distribution unit
        # para_dict.update({"ProductPublishInfo_ContactInformation": "none"})  # contact information
        return para_dict

    def get_productTime(self):
        time_dict = {}
        tree = ElementTree()
        tree.parse(self.origin_xml)
        root = tree.getroot()
        startTime = " "
        endTime = " "
        centerTime = " "

        imagingTime = root.find("productInfo").find('sceneInfo')
        for n in imagingTime:
            if n.tag == 'start':
                startTime = self.convertToDateTime(n.find('timeUTC').text)
            if n.tag == 'stop':
                endTime = self.convertToDateTime(n.find('timeUTC').text)
        ct = (endTime.timestamp() + startTime.timestamp()) / 2
        time_dict.update({"startTime": startTime})
        time_dict.update({"centerTime": self.StampToDateTime(ct)})
        time_dict.update({"endTime": endTime})
        return time_dict

    def get_file_size(self):
        in_tif_paths = list(glob.glob(os.path.join(self.pack_path, '*.tif')))
        in_tif_paths1 = list(glob.glob(os.path.join(self.pack_path, '*.tiff')))
        in_tif_paths += in_tif_paths1
        size = 0
        for file in in_tif_paths:
            fsize = os.path.getsize(file)  # size in bytes
            size += fsize
        return round(size / float(1024 * 1024), 2)


class CreateProductXml:
    def __init__(self, par_dict, model_path, xml_path):
        self.par_dict = par_dict
        self.xml_path = xml_path
        shutil.copy(model_path, xml_path)
        pass

    def create_standard_xml(self):
        """Write the dict values into the copied xml template."""
        tree = ElementTree()
        tree.parse(self.xml_path)  # the copied template
        root = tree.getroot()

        productinfo = root.find("ProductBasicInfo")
        for key, value in self.par_dict.items():
            if key.split("_")[0] == "imageinfo":
                productinfo.find(key.split("_")[1]).text = str(value)
            elif key.split("_")[0] == "SpatialCoverageInformation":
                imageinfo = productinfo.find("SpatialCoverageInformation")
                imageinfo.find(key.split("_")[1]).text = str(value)
            elif key.split("_")[0] == "TimeCoverageInformation":
                timeInfo = productinfo.find("TimeCoverageInformation")
                timeInfo.find(key.split("_")[1]).text = str(value)
            elif key.split("_")[0] == "CoordinateReferenceSystemInformation":
                geoInfo = productinfo.find("CoordinateReferenceSystemInformation")
                geoInfo.find(key.split("_")[1]).text = str(value)
            elif key.split("_")[0] == "MetaInfo":
                metaInfo = productinfo.find("MetaInfo")
                metaInfo.find(key.split("_")[1]).text = str(value)
        ProductProductionInfo = root.find("ProductProductionInfo")  # source data information
        for key, value in self.par_dict.items():
            if key.split("_")[0] == "DataSources":
                dataSources = ProductProductionInfo.find("DataSources")
                dataSource = dataSources.find("DataSource")
                dataSource.find(key.split("_")[2]).text = str(value)
            elif key.split("_")[0] == "ObservationGeometry":
                ObservationGeometry = ProductProductionInfo.find("ObservationGeometry")
                ObservationGeometry.find(key.split("_")[1]).text = str(value)
            elif key.split("_")[0] == "ProductProductionInfo":
                ProductProductionInfo.find(key.split("_")[1]).text = str(value)

        # ProductPublishInfo = root.find("ProductPublishInfo")  # publisher information
        # for key, value in self.par_dict.items():
        #     if key.split("_")[0] == "ProductPublishInfo":
        #         ProductPublishInfo.find(key.split("_")[1]).text = str(value)

        tree.write(self.xml_path, encoding="utf-8", xml_declaration=True)


class OrthoAzimuth:

    @staticmethod
    def FindInfomationFromJson(HeaderFile_dom_json, node_path_list):
        """
        Walk the parsed JSON document along the given node path and return that node.
        """
        result_node = HeaderFile_dom_json
        for nodename in node_path_list:
            result_node = result_node[nodename]
        return result_node

    @staticmethod
    def get_Azimuth_incidence(Azimuth_path):
        Azimuth_incidence = 0
        if not os.path.exists(Azimuth_path):
            return Azimuth_incidence
        with open(Azimuth_path) as f:
            Azimuth_incidence = f.readline()
        return Azimuth_incidence

    @staticmethod
    def read_Azimuth_incidence(xml_path):
        # tree = ElementTree()
        # tree.parse(xml_path)
        # root = tree.getroot()
        # Azimuth_incidence = float(root.find('ProductProductionInfo').find('ObservationGeometry').find('SatelliteAzimuth').text)
        # return Azimuth_incidence
        with open(xml_path, 'r', encoding='utf-8') as fp:
            HeaderFile_dom_str = fp.read()
            HeaderFile_dom = xmltodict.parse(HeaderFile_dom_str)  # convert the XML into a JSON-like dict
            HeaderFile_dom_json = json.loads(json.dumps(HeaderFile_dom))
        node_path_list = ['Root', 'ProductProductionInfo', 'ObservationGeometry', 'SatelliteAzimuth']
        Azimuth_incidence = OrthoAzimuth.FindInfomationFromJson(HeaderFile_dom_json, node_path_list)
        return Azimuth_incidence


if __name__ == '__main__':
    xml_path = r'D:\micro\LWork\SurfaceRoughness\Temporary\preprocessing\LT1B_MONO_MYC_STRIP4_005860_E130.9_N47.7_20230327_SLC_AHV_L1A_0000086966-ortho\LT1B_MONO_MYC_STRIP4_005860_E130.9_N47.7_20230327_SLC_AHV_L1A_0000086966-ortho.meta.xml'
    a = OrthoAzimuth.read_Azimuth_incidence(xml_path)
    print(a)
    pass
    # image_path = r'D:\Micro\WorkSpace\test\GF3B_MYC_QPSI_003581_E120.6_N31.3_20220729_L1B_h_h_L10000073024_db_RD_geo.tif'
    # origin_xml = r'D:\Micro\WorkSpace\Ortho\Temporary\package\GF3B_MYC_QPSI_003581_E120.6_N31.3_20220729_L1A_AHV_L10000073024.meta.xml'
    # tem_folder = r'D:\Micro\WorkSpace\test'
    # pack_path = r'D:\Micro\WorkSpace\Ortho\Temporary\package'
    # out_dem_path1 = os.path.join(tem_folder, "trans_dem_geo_projcs.tif")
    # out_dem_path2 = os.path.join(tem_folder, "trans_dem_projcs_geo.tif")
    # para_dict = CreateMetaDict(image_path, origin_xml, pack_path, out_dem_path1, out_dem_path2).calu_nature()
    #
    # model_path = r'D:\Project\microproduct\Ortho\product.xml'
    # xml_path = r'D:\Micro\WorkSpace\test\test.xml'
    # CreateProductXml(para_dict, model_path, xml_path).create_standard_xml()
@@ -1,48 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project :onestar
@File :ConfigeHandle.py
@Contact:https://blog.csdn.net/songlh1234/article/details/83316468
@Author :SHJ
@Date :2021/11/23 16:57
@Version :1.0.0
"""
import os
import configparser


class Config:
    """Read and write the init config file."""
    def __init__(self):
        pass

    @staticmethod
    def get(para_name, option='config', config_name='config.ini'):
        config = configparser.ConfigParser()
        config_path = os.path.join(os.getcwd(), config_name)
        config.read(config_path, encoding='utf-8')
        config.sections()
        exe_name = config.get(option, para_name)
        return exe_name

    def get_list(self, para_name, option='config', config_name='config.ini'):
        config = configparser.ConfigParser()
        config_path = os.path.join(os.getcwd(), config_name)
        config.read(config_path, encoding='utf-8')
        config.sections()
        str_name = config.get(option, para_name)
        # strip spaces and newlines
        str_name = str(str_name).replace("\n", "").replace(' ', '')
        # split into a list
        name_list = str_name.split(',')
        return name_list


if __name__ == '__main__':
    # c = Config()
    # a = c.get('exe_name')
    # b = bool(c.get('debug'))
    # d = int(c.get('cover_threshold'))
    # f = float(c.get('ndvi_threshold'))

    print('done')
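A sketch of the expected config.ini and a couple of reads; the key names are the ones the commented test block above uses:

# config.ini (illustrative):
#   [config]
#   debug = False
#   exe_name = Ortho
debug = Config.get('debug') == 'True'  # note: bool('False') is True, so compare strings
exe_name = Config.get('exe_name')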
@@ -1,265 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project : microproduct
@File : csvHandle.py
@Function : read and write csv files
@Contact :
@Author:SHJ
@Date:2022/11/6
@Version:1.0.0
"""
import random
import csv
import logging
import numpy as np
from tool.algorithm.image.ImageHandle import ImageHandler
from tool.algorithm.algtools.CoordinateTransformation import geo2imagexy
from tool.algorithm.transforml1a.transHandle import TransImgL1A
logger = logging.getLogger("mylog")


class csvHandle:
    def __init__(self, row=0, col=0):
        self.imageHandler = ImageHandler()
        self.row = row
        self.col = col
        self.img_flag = False  # was 'img_falg'
        if row != 0 and col != 0:
            self.roi_img = np.zeros((row, col), dtype=float)
            self.img_flag = True

    def get_roi_img(self):
        if self.img_flag:
            self.roi_img[self.roi_img == 0] = np.nan
            return self.roi_img
        else:
            return np.array([])

    @staticmethod
    def readcsv(csv_path):
        reader = csv.reader(open(csv_path, newline=''))
        csv_list = []
        for line_data in reader:
            csv_list.append(line_data)
        return csv_list[1:]

    def trans_measuredata(self, meas_data, tif_path):
        file_name = tif_path
        dataset = self.imageHandler.get_dataset(file_name)
        rows = self.imageHandler.get_img_height(file_name)
        cols = self.imageHandler.get_img_width(file_name)
        measdata_list = []
        logger.info('[MEASURE DATA]')
        for data in meas_data:
            lon = float(data[1])
            lat = float(data[2])
            coord = geo2imagexy(dataset, lon, lat)
            row = round(coord[1])
            col = round(coord[0])

            if 0 <= row < rows and 0 <= col < cols:  # was 'row <= rows and col <= cols', off by one
                measdata_list.append([row, col, float(data[3])])
                logger.info([row, col, float(data[3])])
            else:
                logger.warning("measure data: %s is beyond tif scope !", data)
        return measdata_list

    def write_roi_img_data(self, points, type_id):
        if self.img_flag:
            for p in points:
                r = p[0]
                c = p[1]
                if r < self.row and c < self.col:
                    self.roi_img[r, c] = type_id

    def trans_landCover_measuredata(self, meas_data, cuted_ori_sim_path, max_train_num=100000):
        """
        Collect every point inside the polygons and split them into train and test sets.
        :para meas_data: measured data read from the csv
        """
        type_data = {}
        n = 1
        train_data_list = []
        for data in meas_data:
            for d in data:
                if d == '':
                    raise Exception('there are empty data!', data)

            type_id = int(data[1])
            type_name = data[2]
            if type_id not in type_data.keys():
                train_data_list.append([n, type_id, type_name, []])
                type_data.update({type_id: type_name})
                n += 1

            pointList = self.__roiPolygonAnalysis(data[3])
            for points in pointList:
                roi_poly = [(float(lon), float(lat)) for (lon, lat) in points]
                tr = TransImgL1A(cuted_ori_sim_path, roi_poly)
                if tr._mask is not None:
                    points = tr.get_roi_points()
                    for train_data in train_data_list:
                        if train_data[1] == type_id:
                            train_data[3] += points
                            self.write_roi_img_data(points, type_id)
            if train_data[3] == []:
                raise Exception('there are empty data!', train_data)
        if len(train_data_list) <= 1:
            raise Exception('there is only one label type!', train_data_list)

        for train_data in train_data_list:
            logger.info(str(train_data[0]) + "," + str(train_data[2]) + "," + "num:" + str(len(train_data[3])))
            max_num = max_train_num
            if len(train_data[3]) > max_num:
                logger.info("max number =" + str(max_num) + ", random select " + str(max_num) + " point as train data!")
                train_data[3] = random.sample(train_data[3], max_num)

        return train_data_list

    def trans_landCover_measuredata_dic(self, meas_data, cuted_ori_sim_path, max_train_num=100000):
        train_data_list = self.trans_landCover_measuredata(meas_data, cuted_ori_sim_path, max_train_num)
        return self.trans_landCover_list2dic(train_data_list)

    @staticmethod
    def trans_landCover_list2dic(train_data_list):
        ids = []
        class_ids = []
        ch_names = []
        positions = []
        for data in train_data_list:
            ids.append(data[0])
            class_ids.append(data[1])
            ch_names.append(data[2])
            positions.append(data[3])

        train_data_dic = {}
        train_data_dic.update({"ids": ids})
        train_data_dic.update({"class_ids": class_ids})
        train_data_dic.update({"ch_names": ch_names})
        train_data_dic.update({"positions": positions})
        return train_data_dic

    @staticmethod
    def __roiPolygonAnalysis(roiStr):
        """
        Convert the csv POLYGON string into arrays.
        :para roiStr: the POLYGON string
        :return pointList: list of polygons
        """
        pointList = []
        strContent = roiStr.replace("POLYGON", "")
        # parse the outline string into a 2-D array
        bracketsList = []
        strTemp = ''
        strList = []
        for c in strContent:
            if c == '(':
                bracketsList.append(c)
                continue
            elif c == ')':
                if len(bracketsList) > 0:
                    bracketsList.pop(0)
                if len(strTemp) > 0:
                    strList.append(strTemp)
                    strTemp = ''
            else:
                strTemp += c
        for item in strList:
            if len(item) == 0:
                continue
            pTempList = item.split(',')
            pList = []
            for row in pTempList:
                cells = row.split(' ')
                if len(cells) != 2:
                    continue
                point = [float(cells[0]), float(cells[1])]
                pList.append(point)
            pointList.append(pList)
        return pointList

    def class_landcover_list(self, csv_path):
        """
        Return the leading columns of the csv table.
        """
        reader = csv.reader(open(csv_path, newline=''))
        class_list = []
        type_id_name = {}
        type_id_parent = {}
        for line_data in reader:
            class_list.append(line_data)  # class_list has four columns
        for data in class_list[1:]:
            type_parent = data[0]
            type_id = int(data[1])
            type_name = data[2]

            if type_id not in type_id_name.keys():
                type_id_name.update({type_id: type_name})
                type_id_parent.update({type_id: type_parent})
        return type_id_name, type_id_parent

    def trans_VegePhenology_measdata_dic(self, meas_data, cuted_ori_sim_path):
        """
        Collect every point inside the polygons and split them into train and test sets.
        :para meas_data: measured data read from the csv
        """
        train_data = []
        test_data = []
        type_data = {}

        for data in meas_data:
            data_use_type = data[0]
            sar_img_name = data[1]
            name = sar_img_name
            if name.endswith('.tar.gz'):
                name = name[:-len('.tar.gz')]  # rstrip('.tar.gz') strips characters, not the suffix

            if data_use_type == 'train':
                phenology_id = int(data[2])
                phenology_name = data[3]
                if phenology_id not in type_data.keys():
                    type_data.update({phenology_id: phenology_name})
            else:
                phenology_id = -1

            pointList = self.__roiPolygonAnalysis(data[4])
            l1a_points = []
            for points in pointList:
                roi_poly = [(float(lon), float(lat)) for (lon, lat) in points]
                tr = TransImgL1A(cuted_ori_sim_path, roi_poly)
                l1a_points = tr.get_roi_points()
                # l1a_points = tr.get_lonlat_points()
            if data_use_type == 'train':
                train_data.append([name, phenology_id, l1a_points, type_data[phenology_id]])
            elif data_use_type == 'test':
                test_data.append([name, phenology_id, l1a_points])
        type_map = []
        for n, id in zip(range(len(type_data)), type_data):
            type_map.append([n + 1, id, type_data[id]])

        return train_data, test_data, type_map

    @staticmethod
    def vegePhenology_class_list(csv_path):
        """
        Return the leading columns of the csv table.
        """
        reader = csv.reader(open(csv_path, newline=''))
        class_list = []
        type_id_name = {}
        for line_data in reader:
            class_list.append(line_data)  # class_list has four columns
        for data in class_list[1:]:
            type_id = data[2]
            type_name = data[3]

            if type_id not in type_id_name.keys():
                if type_id.strip() != "":
                    type_id_name.update({type_id: type_name})
        return type_id_name

# if __name__ == '__main__':
#     csvh = csvHandle()
#     csv_path = r"I:\preprocessed\VegetationPhenologyMeasureData_E118.9_N31.4.csv"
#     data = csvh.trans_VegePhenology_measdata_dic(csvh.readcsv(csv_path), r"I:\preprocessed\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_RPC_ori_sim_preprocessed.tif")
#     pass
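For reference, the shape of the POLY column parsed above and the structure it becomes; the coordinates are illustrative:

# 'POLYGON((118.91 31.45,118.93 31.45,118.93 31.43,118.91 31.43))'
# __roiPolygonAnalysis -> [[[118.91, 31.45], [118.93, 31.45], [118.93, 31.43], [118.91, 31.43]]]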
@ -1,88 +0,0 @@
|
|||
# -*- coding: UTF-8 -*-
"""
@Project : microproduct
@File : fileHandle.py
@Function : file creation, deletion, unpacking and packing
@Contact :
@Author:SHJ
@Date:2022/11/6
@Version:1.0.0
"""
import os
import tarfile
import shutil


class fileHandle:
    def __init__(self, debug_mode=False):
        self.__debug_mode = debug_mode

    def creat_dirs(self, path_list):
        """
        Create the folders in path_list; an existing folder is recreated
        unless debug mode is on.
        """
        for path in path_list:
            if os.path.exists(path):
                if self.__debug_mode is True:
                    continue
                self.del_folder(path)
                os.makedirs(path)
            else:
                os.makedirs(path)

    def del_folder(self, dic):
        """
        Delete a whole folder.
        """
        if self.__debug_mode is True:
            return
        if os.path.isdir(dic):
            shutil.rmtree(dic)

    def del_file(self, path_data):
        """
        Delete only files; folders are kept and recursed into.
        """
        for i in os.listdir(path_data):  # relative names of everything under path_data
            file_data = os.path.join(path_data, i)  # os.path.join instead of a hard-coded '\\'
            if os.path.isfile(file_data) is True:  # delete files, recurse into folders
                os.remove(file_data)
            else:
                self.del_file(file_data)

    @staticmethod
    def make_targz(output_filename, source_dir):
        """
        Pack the whole root directory in one pass; empty subdirectories are included.
        To pack without compression, change "w:gz" to "w:" or "w".
        :param output_filename: full path of the output archive, e.g. 'E:\test.tar.gz'
        :param source_dir: root directory to pack, e.g. 'E:\testFile\' packs the
                           folder's contents, 'E:\testFile' packs the folder itself
        """
        out_dir = os.path.split(output_filename)[0]
        if os.path.exists(out_dir) is False:
            os.makedirs(out_dir)
        with tarfile.open(output_filename, "w:gz") as tar:
            tar.add(source_dir, arcname=os.path.basename(source_dir))

    @staticmethod
    def de_targz(tar_gz_path, file_dir):
        if os.path.exists(file_dir) is False:
            os.makedirs(file_dir)
        # unpack, closing the archive afterwards
        with tarfile.open(tar_gz_path) as t:
            t.extractall(path=file_dir)

    @staticmethod
    def copyfile2dir(srcfile, dir):  # copy a file into a directory
        if not os.path.isfile(srcfile):
            print("%s does not exist!" % srcfile)
        else:
            fpath, fname = os.path.split(srcfile)  # split path and file name
            if not os.path.exists(dir):
                os.makedirs(dir)  # create the target path
            shutil.copy(srcfile, os.path.join(dir, fname))  # 'dir + fname' would drop the separator


# if __name__ == '__main__':
#     file = fileHandle()
#     file.del_folder(r"I:\preprocessed")
#     pass
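
# A minimal usage sketch (paths are illustrative):
#   fh = fileHandle(debug_mode=False)
#   fh.creat_dirs([r'D:\micro\LWork\tmp'])
#   fh.make_targz(r'D:\micro\LWork\out\product.tar.gz', r'D:\micro\LWork\tmp')
#   fh.de_targz(r'D:\micro\LWork\out\product.tar.gz', r'D:\micro\LWork\unpack')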

@@ -1,90 +0,0 @@
# -*- coding: UTF-8 -*-
"""
@Project :microproduct
@File :logHandler.py
@Author :SHJ
@Date :2021/9/6
@Version :1.0.0
"""
import logging
import os
# from logging import handlers
import time
import datetime


class LogHandler:
    """
    Generate the run log.
    """
    __logger = logging.getLogger("mylog")
    __format_str = logging.Formatter("[%(asctime)s] [%(process)d] [%(levelname)s] - %(module)s.%(funcName)s "
                                     "(%(filename)s:%(lineno)d) - %(message)s")
    __log_path = None

    @staticmethod
    def init_log_handler(log_name):
        """
        Initialise logging.
        :param log_name: path and name prefix for the log file
        :return:
        """
        path = os.getcwd()
        current_time = time.strftime("%Y-%m-%d-%H-%M-%S", time.localtime(time.time()))
        LogHandler.__log_path = os.path.join(path, log_name + current_time + ".log")
        para_dir = os.path.split(LogHandler.__log_path)
        if not os.path.exists(para_dir[0]):
            os.makedirs(para_dir[0])
        # delete files created more than seven days ago
        LogHandler.delete_outdate_files(para_dir[0], 7)

        # Option 1: plain file + console logging
        LOG_FORMAT = "[%(asctime)s] [%(process)d] [%(levelname)s]- %(message)s ---from: %(module)s.%(funcName)s" \
                     " (%(filename)s:Line%(lineno)d) "
        DATE_FORMAT = "%m/%d/%Y %H:%M:%S"
        fp = logging.FileHandler(LogHandler.__log_path, encoding='utf-8')
        fs = logging.StreamHandler()
        logging.basicConfig(level=logging.INFO, format=LOG_FORMAT, datefmt=DATE_FORMAT, handlers=[fp, fs])  # configure the root logger

        # Option 2: rotating log
        # LogHandler.__logger.setLevel(logging.DEBUG)
        # th = handlers.TimedRotatingFileHandler(filename=LogHandler.__log_path, when='S', interval=1,
        #                                        backupCount=2, encoding='utf-8')
        # th.suffix = "%Y-%m-%d-%H-%M-%S.log"
        # th.setFormatter(LogHandler.__format_str)
        # th.setLevel(level=logging.DEBUG)

        # console = logging.StreamHandler()
        # console.setLevel(logging.INFO)
        # LogHandler.__logger.addHandler(console)
        # LogHandler.__logger.addHandler(th)

    @staticmethod
    def delete_outdate_files(path, date_interval=7):
        """
        Delete log files in the directory created more than date_interval days ago.
        """
        current_time = time.strftime("%Y-%m-%d", time.localtime(time.time()))
        current_timeList = current_time.split("-")
        current_time_day = datetime.datetime(int(current_timeList[0]), int(current_timeList[1]),
                                             int(current_timeList[2]))
        for root, dirs, files in os.walk(path):
            for item in files:
                # item.split(".", 2)[1] raises IndexError on names without a dot;
                # match the ".log" suffix instead
                if item.endswith(".log"):
                    file_path = os.path.join(root, item)
                    create_time = time.strftime("%Y-%m-%d", time.localtime((os.stat(file_path)).st_mtime))
                    create_time_list = create_time.split("-")
                    create_time_day = datetime.datetime(int(create_time_list[0]), int(create_time_list[1]),
                                                        int(create_time_list[2]))
                    time_difference = (current_time_day - create_time_day).days
                    if time_difference > date_interval:
                        os.remove(file_path)


#
# if __name__ == "__main__":
#     # eg2:
#     log_handler = LogHandler()
#     log_handler.init_log_handler(r"run_log\myrun1")
#     logging.warning("1")
#     print("done")
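
# delete_outdate_files can also be called on its own (path is illustrative):
#   LogHandler.delete_outdate_files(r"run_log", date_interval=7)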

@@ -1,47 +0,0 @@
sar_img_name,phenology_id,phenology_name,roi_polygon
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,10,water,"POLYGON((118.91799748 31.46893509,118.91762055 31.46674878,118.9210883 31.46637183,118.9210883 31.46855814,118.91799748 31.46893509))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,10,water,"POLYGON((118.90864966 31.46388396,118.90861196 31.46384627,118.90857427 31.46380857,118.90608654 31.46188613,118.90608654 31.46181074,118.90940351 31.46015216,118.91155201 31.46199921,118.90864966 31.46388396))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,10,water,"POLYGON((118.91898928 31.45778718,118.91893038 31.45478336,118.91893038 31.45472446,118.91898928 31.45472446,118.9246432 31.45472446,118.9247021 31.45472446,118.92499657 31.45872956,118.91898928 31.45778718))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.89543794 31.46174102,118.89452125 31.4583153,118.8948831 31.45807405,118.89599277 31.46171689,118.89543794 31.46174102))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.8940629 31.45653006,118.89280848 31.45312847,118.8932427 31.45308022,118.89326683 31.45308022,118.89442475 31.45636119,118.8940629 31.45653006))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.89132626 31.44844806,118.89046628 31.4453566,118.89079168 31.44540309,118.89081492 31.44542633,118.89083817 31.44544958,118.89086141 31.44547282,118.89172139 31.44840157,118.89132626 31.44844806))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.89028034 31.4441944,118.89016413 31.44256732,118.89044304 31.44256732,118.89046628 31.44259056,118.89048953 31.4426138,118.89051277 31.44266029,118.8907452 31.44472901,118.89028034 31.4441944))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.89000143 31.44193973,118.88997819 31.44193973,118.88997819 31.44189324,118.88923442 31.43399026,118.8894436 31.43403675,118.89018737 31.44191648,118.89000143 31.44193973))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.88897526 31.42114936,118.8888637 31.4205915,118.8888637 31.4205729,118.89468368 31.41977331,118.89470227 31.41977331,118.89472087 31.41977331,118.89499978 31.42042414,118.88897526 31.42114936))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.88025459 31.42230226,118.88010583 31.42153986,118.88594441 31.42088902,118.885963 31.42088902,118.8866138 31.42155845,118.88025459 31.42230226))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.87595933 31.42280433,118.87577339 31.42222788,118.87977114 31.42178159,118.87999427 31.42228367,118.87595933 31.42280433))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.89870564 31.41809418,118.89826403 31.41748983,118.89826403 31.41746659,118.89828727 31.41744335,118.90193638 31.41511894,118.90261042 31.41551409,118.89870564 31.41809418))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,20,build,"POLYGON((118.86530883 31.42443948,118.86630754 31.42325911,118.87384328 31.42257814,118.87388867 31.42257814,118.87397946 31.42253274,118.87411565 31.42253274,118.87493278 31.42307752,118.87488738 31.42312292,118.87479659 31.42312292,118.86530883 31.42443948))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.91830432 31.38444815,118.91828107 31.3844249,118.91828107 31.38440166,118.9183508 31.38433193,118.91958267 31.3826816,118.91960591 31.3826816,118.9223718 31.38421571,118.9223718 31.38423895,118.9223718 31.38426219,118.92146533 31.38628443,118.91830432 31.38444815))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.93304021 31.37921823,118.93396992 31.37812576,118.93399316 31.37812576,118.93401641 31.37812576,118.93536449 31.37919499,118.93464396 31.38033395,118.93304021 31.37921823))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.93629419 31.37882309,118.93627095 31.37882309,118.93627095 31.37879984,118.93701472 31.37756791,118.93752606 31.37780035,118.93752606 31.37782359,118.93666608 31.37905553,118.93629419 31.37882309))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.93680553 31.38465734,118.9372239 31.38396002,118.93817686 31.38454112,118.9382001 31.38454112,118.93822334 31.38456437,118.93810713 31.38533142,118.93808389 31.38535467,118.93680553 31.38465734))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.94384808 31.38312765,118.94420137 31.38271856,118.94421997 31.38271856,118.94423856 31.38271856,118.94505671 31.38325782,118.94453607 31.38387146,118.94384808 31.38312765))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.94066848 31.37855322,118.94096598 31.37806975,118.94096598 31.37805115,118.94096598 31.37803256,118.94096598 31.37801396,118.94096598 31.37799537,118.94223039 31.37881356,118.94163538 31.37946439,118.94161678 31.37946439,118.94066848 31.37855322))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.93942267 31.3813797,118.9396272 31.38108218,118.9396458 31.38108218,118.94048254 31.38154706,118.94022222 31.38204913,118.93942267 31.3813797))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.94446169 31.38115656,118.94479639 31.38069168,118.94576329 31.38136111,118.94546578 31.38193756,118.94446169 31.38115656))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.94968666 31.38693968,118.94968666 31.38690249,118.9498912 31.38660496,118.94990979 31.38660496,118.95063496 31.38701406,118.95033746 31.38740456,118.94968666 31.38693968))"
GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho,30,road,"POLYGON((118.95310799 31.37931563,118.95310799 31.37929703,118.9537216 31.37885075,118.95374019 31.37883215,118.95379598 31.37879496,118.95463271 31.37931563,118.95418645 31.3798363,118.95310799 31.37931563))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,10,water,"POLYGON((118.91799748 31.46893509,118.91762055 31.46674878,118.9210883 31.46637183,118.9210883 31.46855814,118.91799748 31.46893509))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,10,water,"POLYGON((118.90864966 31.46388396,118.90861196 31.46384627,118.90857427 31.46380857,118.90608654 31.46188613,118.90608654 31.46181074,118.90940351 31.46015216,118.91155201 31.46199921,118.90864966 31.46388396))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,10,water,"POLYGON((118.91898928 31.45778718,118.91893038 31.45478336,118.91893038 31.45472446,118.91898928 31.45472446,118.9246432 31.45472446,118.9247021 31.45472446,118.92499657 31.45872956,118.91898928 31.45778718))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.89543794 31.46174102,118.89452125 31.4583153,118.8948831 31.45807405,118.89599277 31.46171689,118.89543794 31.46174102))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.8940629 31.45653006,118.89280848 31.45312847,118.8932427 31.45308022,118.89326683 31.45308022,118.89442475 31.45636119,118.8940629 31.45653006))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.89132626 31.44844806,118.89046628 31.4453566,118.89079168 31.44540309,118.89081492 31.44542633,118.89083817 31.44544958,118.89086141 31.44547282,118.89172139 31.44840157,118.89132626 31.44844806))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.89028034 31.4441944,118.89016413 31.44256732,118.89044304 31.44256732,118.89046628 31.44259056,118.89048953 31.4426138,118.89051277 31.44266029,118.8907452 31.44472901,118.89028034 31.4441944))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.89000143 31.44193973,118.88997819 31.44193973,118.88997819 31.44189324,118.88923442 31.43399026,118.8894436 31.43403675,118.89018737 31.44191648,118.89000143 31.44193973))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.88897526 31.42114936,118.8888637 31.4205915,118.8888637 31.4205729,118.89468368 31.41977331,118.89470227 31.41977331,118.89472087 31.41977331,118.89499978 31.42042414,118.88897526 31.42114936))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.88025459 31.42230226,118.88010583 31.42153986,118.88594441 31.42088902,118.885963 31.42088902,118.8866138 31.42155845,118.88025459 31.42230226))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.87595933 31.42280433,118.87577339 31.42222788,118.87977114 31.42178159,118.87999427 31.42228367,118.87595933 31.42280433))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.89870564 31.41809418,118.89826403 31.41748983,118.89826403 31.41746659,118.89828727 31.41744335,118.90193638 31.41511894,118.90261042 31.41551409,118.89870564 31.41809418))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,20,build,"POLYGON((118.86530883 31.42443948,118.86630754 31.42325911,118.87384328 31.42257814,118.87388867 31.42257814,118.87397946 31.42253274,118.87411565 31.42253274,118.87493278 31.42307752,118.87488738 31.42312292,118.87479659 31.42312292,118.86530883 31.42443948))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.91830432 31.38444815,118.91828107 31.3844249,118.91828107 31.38440166,118.9183508 31.38433193,118.91958267 31.3826816,118.91960591 31.3826816,118.9223718 31.38421571,118.9223718 31.38423895,118.9223718 31.38426219,118.92146533 31.38628443,118.91830432 31.38444815))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.93304021 31.37921823,118.93396992 31.37812576,118.93399316 31.37812576,118.93401641 31.37812576,118.93536449 31.37919499,118.93464396 31.38033395,118.93304021 31.37921823))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.93629419 31.37882309,118.93627095 31.37882309,118.93627095 31.37879984,118.93701472 31.37756791,118.93752606 31.37780035,118.93752606 31.37782359,118.93666608 31.37905553,118.93629419 31.37882309))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.93680553 31.38465734,118.9372239 31.38396002,118.93817686 31.38454112,118.9382001 31.38454112,118.93822334 31.38456437,118.93810713 31.38533142,118.93808389 31.38535467,118.93680553 31.38465734))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.94384808 31.38312765,118.94420137 31.38271856,118.94421997 31.38271856,118.94423856 31.38271856,118.94505671 31.38325782,118.94453607 31.38387146,118.94384808 31.38312765))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.94066848 31.37855322,118.94096598 31.37806975,118.94096598 31.37805115,118.94096598 31.37803256,118.94096598 31.37801396,118.94096598 31.37799537,118.94223039 31.37881356,118.94163538 31.37946439,118.94161678 31.37946439,118.94066848 31.37855322))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.93942267 31.3813797,118.9396272 31.38108218,118.9396458 31.38108218,118.94048254 31.38154706,118.94022222 31.38204913,118.93942267 31.3813797))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.94446169 31.38115656,118.94479639 31.38069168,118.94576329 31.38136111,118.94546578 31.38193756,118.94446169 31.38115656))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.94968666 31.38693968,118.94968666 31.38690249,118.9498912 31.38660496,118.94990979 31.38660496,118.95063496 31.38701406,118.95033746 31.38740456,118.94968666 31.38693968))"
GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho,30,road,"POLYGON((118.95310799 31.37931563,118.95310799 31.37929703,118.9537216 31.37885075,118.95374019 31.37883215,118.95379598 31.37879496,118.95463271 31.37931563,118.95418645 31.3798363,118.95310799 31.37931563))"