"""microproduct/tool/algorithm/polsarpro/createfeature.py

Feature production for full-polarization SAR imagery: converts HH/HV/VH/VV
GeoTIFFs to a PolSARpro T3 matrix, applies refined Lee filtering, and runs
polarimetric decompositions (H/A/Alpha, Freeman, Touzi, Yamaguchi,
Cloude-Pottier) to generate feature rasters.
"""
from tool.algorithm.algtools.MetaDataHandler import Calibration
from tool.algorithm.polsarpro.AHVToPolsarpro import AHVToPolsarpro
from tool.algorithm.polsarpro.pspLeeRefinedFilterT3 import LeeRefinedFilterT3
from tool.algorithm.polsarpro.pspCloudePottierDecomposition import PspCloudePottierDecomposition
from tool.algorithm.polsarpro.pspFreemanDecomposition import PspFreemanDecomposition
from tool.algorithm.polsarpro.pspYamaguchiDecomposition import PspYamaguchiDecomposition
from tool.algorithm.polsarpro.pspTouziDecomposition import PspTouziDecomposition
from tool.algorithm.polsarpro.bin2tif import write_bin_to_tif
from tool.algorithm.polsarpro.pspHAAlphaDecomposition import PspHAAlphaDecomposition
from tool.algorithm.xml.AlgXmlHandle import InitPara
import logging
import os
import shutil
import glob
logger = logging.getLogger("mylog")
class CreateFeature:
    """Produce polarimetric features from full-polarization SAR data.

    Wraps the PolSARpro command-line tools: T3 conversion, refined Lee
    filtering, and the Freeman / Touzi / Yamaguchi / Cloude-Pottier /
    H-A-Alpha decompositions.
    """

    def __init__(self, debug=False, exe_dir=''):
        # debug=True skips every call into the external PolSARpro executables
        # and only computes/returns the output directory layout.
        self._debug = debug
        # Directory holding the PolSARpro *.exe tools.
        self._exe_dir = exe_dir

    def ahv_to_t3(self, workspace_processing_path, workspace_preprocessing_path,
                  hh_hv_vh_vv_list, name='', FILTER_SIZE=3):
        """Convert full-pol tifs to a PolSARpro T3 matrix and Lee-filter it.

        :param workspace_processing_path: root of the processing workspace
        :param workspace_preprocessing_path: root of the preprocessing workspace
        :param hh_hv_vh_vv_list: paths of the HH, HV, VH, VV tifs (in order)
        :param name: scene name (sub-directory under the workspaces)
        :param FILTER_SIZE: refined-Lee filter window size
        :return: directory containing the Lee-filtered T3 data
        """
        # Convert the full-polarization tifs into T3 data in PolSARpro bin
        # format. (The original code constructed a throwaway no-arg
        # AHVToPolsarpro() first; that dead instantiation has been removed.)
        atp = AHVToPolsarpro(hh_hv_vh_vv_list)
        lee_filter_path = os.path.join(workspace_processing_path, name, 'lee_filter\\')
        if not self._debug:
            t3_path = os.path.join(workspace_processing_path, name, 'psp_t3\\')
            polarization = ['HH', 'HV', 'VH', 'VV']
            if os.path.exists(workspace_preprocessing_path + name + '\\'):
                # Read calibration coefficients from the scene's *.meta.xml,
                # apply them, then build the T3 matrix from the calibrated tifs.
                meta_xml_paths = list(glob.glob(os.path.join(workspace_preprocessing_path + name, '*.meta.xml')))
                meta_dic = InitPara.get_meta_dic_new(meta_xml_paths, name)
                calibration = Calibration.get_Calibration_coefficient(meta_dic['Origin_META'], polarization)
                tif_path = atp.calibration(calibration, workspace_preprocessing_path, name)
                # NOTE(review): kept inside this branch so tif_path is always
                # bound before use — confirm against the original layout.
                atp.ahv_to_polsarpro_t3_veg(t3_path, tif_path)
            # Refined Lee filtering of the T3 matrix.
            leeFilter = LeeRefinedFilterT3()
            leeFilter.api_lee_refined_filter_T3('', t3_path, lee_filter_path, 0, 0,
                                                atp.rows(), atp.cols(), FILTER_SIZE)
            logger.info("refine_lee filter success!")
        return lee_filter_path

    def decompose(self, workspace_processing_path, name, t3_path, rows, cols,
                  hh_hv_vh_vv_dic=None, FeatureInput=('Freeman', 'Yamaguchi', 'Cloude')):
        """Run the selected polarimetric decompositions on a T3 matrix.

        :param workspace_processing_path: root of the processing workspace
        :param name: scene name
        :param t3_path: directory of the (filtered) T3 matrix
        :param rows: image row count
        :param cols: image column count
        :param hh_hv_vh_vv_dic: polarization -> tif path map (Touzi only)
        :param FeatureInput: decompositions to run; any of
            'Freeman', 'Touzi', 'Yamaguchi', 'Cloude'
        :return: (True, {decomposition: output dir}) on success,
                 (False, None) on the first failure
        """
        # Mutable-default fix: build the dict per call.
        if hh_hv_vh_vv_dic is None:
            hh_hv_vh_vv_dic = {}
        exeDir = self._exe_dir
        outFolderDic = {}
        if 'Freeman' in FeatureInput:
            # Freeman three-component decomposition.
            freemanOutDir = os.path.join(workspace_processing_path, name + '\\freeman\\')
            if not self._debug:
                freemDecom = PspFreemanDecomposition(exeDir, t3_path, freemanOutDir)
                if not freemDecom.api_freeman_decomposition_T3(0, 0, rows, cols):
                    logger.error('FreemanDecomposition err')
                    return False, None
            outFolderDic['Freeman'] = freemanOutDir
        if 'Touzi' in FeatureInput:
            # Touzi decomposition: slow and a weak feature contributor, hence
            # not in the default FeatureInput.
            touziOutDir = os.path.join(workspace_processing_path, name + '\\touzi\\')
            if not os.path.exists(touziOutDir):
                os.makedirs(touziOutDir)
            if not self._debug:
                p = PspTouziDecomposition(hh_hv_vh_vv_dic, touziOutDir)
                p.Touzi_decomposition_multiprocessing()
            outFolderDic['Touzi'] = touziOutDir
        if 'Yamaguchi' in FeatureInput:
            # Yamaguchi four-component decomposition.
            yamaguchiOutDir = os.path.join(workspace_processing_path, name + '\\yamaguchi\\')
            if not self._debug:
                yamaguchiDecom = PspYamaguchiDecomposition(exeDir, t3_path, yamaguchiOutDir)
                if not yamaguchiDecom.api_yamaguchi_4components_decomposition_T3(0, 0, rows, cols):
                    # Bug fix: this branch previously logged
                    # 'CloudePottierDecomposition err' (copy-paste error).
                    logger.error('YamaguchiDecomposition err')
                    return False, None
            outFolderDic['Yamaguchi'] = yamaguchiOutDir
        if 'Cloude' in FeatureInput:
            # Cloude-Pottier (H/A/Alpha) decomposition.
            cloudeOutDir = os.path.join(workspace_processing_path, name + '\\cloude\\')
            if not self._debug:
                cloudeDecom = PspCloudePottierDecomposition(exeDir, t3_path, cloudeOutDir)
                if not cloudeDecom.api_h_a_alpha_decomposition_T3(0, 0, rows, cols):
                    logger.error('CloudePottierDecomposition err')
                    return False, None
            outFolderDic['Cloude'] = cloudeOutDir
        return True, outFolderDic

    def creat_h_a_alpha_features(self, t3_path, out_dir):
        """Create H/A/Alpha eigen-decomposition feature rasters from T3 data.

        :param t3_path: directory of the PolSARpro T3 matrix
        :param out_dir: directory to write the feature rasters into
        """
        logger.info('ahv transform to polsarpro T3 matrix success!')
        logger.info('progress bar: 20%')
        # Paths of the three PolSARpro executables involved.
        h_a_alpha_decomposition_T3_path = os.path.join(self._exe_dir, 'h_a_alpha_decomposition_T3.exe')
        h_a_alpha_eigenvalue_set_T3_path = os.path.join(self._exe_dir, 'h_a_alpha_eigenvalue_set_T3.exe')
        h_a_alpha_eigenvector_set_T3_path = os.path.join(self._exe_dir, 'h_a_alpha_eigenvector_set_T3.exe')
        if not self._debug:
            haa = PspHAAlphaDecomposition(normalization=True)
            haa.api_creat_h_a_alpha_features(
                h_a_alpha_out_dir=out_dir,
                h_a_alpha_decomposition_T3_path=h_a_alpha_decomposition_T3_path,
                h_a_alpha_eigenvalue_set_T3_path=h_a_alpha_eigenvalue_set_T3_path,
                h_a_alpha_eigenvector_set_T3_path=h_a_alpha_eigenvector_set_T3_path,
                polsarpro_in_dir=t3_path)

    def cereat_features_dic(self, outFolderDic, feature_tif_dir):
        """Collect decomposition outputs into feature_tif_dir as GeoTIFFs.

        Touzi results are already tifs and are copied; every other
        decomposition's bin files are converted via write_bin_to_tif.

        :param outFolderDic: {decomposition: output dir} from decompose()
        :param feature_tif_dir: destination directory for the feature tifs
        :return: {feature name: tif path}
        """
        if not os.path.exists(feature_tif_dir):
            os.makedirs(feature_tif_dir)
        feature_tif_paths = {}
        for key in outFolderDic:
            feature_bin_dic = outFolderDic[key]
            if key == 'Touzi':
                for path in glob.glob(os.path.join(feature_bin_dic, '*.tif')):
                    name = os.path.split(path)[1].split('.')[0]
                    if not self._debug:
                        shutil.copyfile(path, os.path.join(feature_tif_dir, name + '.tif'))
                    feature_tif_paths.update({name: os.path.join(feature_tif_dir, name + '.tif')})
            else:
                feature_tif_paths.update(write_bin_to_tif(feature_tif_dir, feature_bin_dic))
        return feature_tif_paths

    @staticmethod
    def decompose_single_tar(hh_hv_vh_vv_list, workspace_processing_path, workspace_preprocessing_path,
                             name, exe_dir, rows, cols, FILTER_SIZE=3, debug=False,
                             FeatureInput=('Freeman', 'Yamaguchi', 'Cloude')):
        """End-to-end feature production for a single full-pol scene.

        Builds the T3 matrix, Lee-filters it, runs the requested
        decompositions and materializes all features as tifs.

        :return: directory containing the produced feature tifs
        """
        hh_hv_vh_vv_dic = dict(zip(('HH', 'HV', 'VH', 'VV'), hh_hv_vh_vv_list))
        t3_path = os.path.join(workspace_processing_path, name, "lee_filter")
        feature_tif_dir = os.path.join(workspace_processing_path, name, 'features')
        cfeature = CreateFeature(debug, exe_dir)
        # NOTE(review): H/A/Alpha features are generated from the lee_filter
        # directory BEFORE ahv_to_t3 (re)creates it — call order preserved
        # from the original; confirm the directory is expected to pre-exist.
        cfeature.creat_h_a_alpha_features(t3_path, feature_tif_dir)
        t3_path = cfeature.ahv_to_t3(workspace_processing_path, workspace_preprocessing_path,
                                     hh_hv_vh_vv_list, name, FILTER_SIZE)
        flag, outFolderDic = cfeature.decompose(workspace_processing_path, name, t3_path,
                                                rows, cols, hh_hv_vh_vv_dic, FeatureInput)
        # Bug fix: a failed decomposition returns (False, None); iterating
        # None previously crashed cereat_features_dic.
        if flag and outFolderDic:
            cfeature.cereat_features_dic(outFolderDic, feature_tif_dir)
        return feature_tif_dir
if __name__ == '__main__':
    # The commented-out usage example that previously lived here was stale:
    # it called ahv_to_t3 with an outdated signature (missing
    # workspace_preprocessing_path). See CreateFeature.decompose_single_tar
    # for the supported end-to-end entry point.
    pass