# -*- coding: UTF-8 -*-
"""
@Project:__init__.py
@File:pspHAAlphaDecomposition.py
@Function: Cloude-Pottier eigenvector/eigenvalue based decomposition of a 3x3 coherency matrix [T3]
(Averaging using a sliding window)
V1.0.1: 1. optional decomposition features; 2. convert .bin outputs to .tif
@Contact:
@Author:SHJ
@Date:2021/9/24 9:06
@Version:1.0.1
"""
import os
import shutil
import subprocess
import struct
import numpy as np
import glob
from PIL import Image
import logging
import multiprocessing

logger = logging.getLogger("mylog")


class PspHAAlphaDecomposition:
"""
调用polsarpro4.2.0的Cloude-Pottier极化分解
"""
    def __init__(self, normalization=False):
        self.__normalization = normalization  # whether to normalize outputs to [0, 1]
        self.__res_h_a_alpha_decomposition_T3 = {}
        self.__res_h_a_alpha_eigenvalue_set_T3 = {}
        self.__res_h_a_alpha_eigenvector_set_T3 = {}
def api_creat_h_a_alpha_features_single_process(self, h_a_alpha_out_dir,
h_a_alpha_decomposition_T3_path, h_a_alpha_eigenvalue_set_T3_path,
h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir,is_trans_to_tif=True, is_read_to_dic=False):
"""
对porsarpro格式T3矩阵做Cloude-Pottier分解(h_a_alpha_decompositionh_a_alpha_eigenvalue_set h_a_alpha_eigenvector_set)
:param h_a_alpha_out_dir : 输出h_a_alpha二进制数据的目录
:param h_a_alpha_decomposition_T3_path: haalphadecompositionT3.exe路径
:param h_a_alpha_eigenvalue_set_T3_path: h_a_alpha_eigenvalue_set_T3.exe路径
:param h_a_alpha_eigenvector_set_T3_path: h_a_alpha_eigenvector_set_T3.exe路径
:param polsarpro_in_dir:输入porsarpro格式T3矩阵目录包含.bin,.config
"""
h_a_alpha_features ={}
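        # The trailing *(1, ..., 1) tuples enable every optional output flag of the
        # corresponding PolSARpro executable (0 = skip, 1 = produce).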
h_a_alpha_features.update(self.api_h_a_alpha_decomposition_T3(h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, is_trans_to_tif,is_read_to_dic, *(1, 1, 1, 1, 1, 1, 1, 1, 1)))
logger.info("run h_a_alpha_decomposition_T3 success!")
logger.info('progress bar: 40%')
h_a_alpha_features.update(self.api_h_a_alpha_eigenvalue_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, is_trans_to_tif, is_read_to_dic, *(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1)))
logger.info("run h_a_alpha_eigenvalue_set_T3 success!")
logger.info('progress bar: 60%')
h_a_alpha_features.update(self.api_h_a_alpha_eigenvector_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, is_trans_to_tif,is_read_to_dic, *(1, 1, 1, 1, 1)))
logger.info("run h_a_alpha_eigenvector_set_T3 success!")
logger.info('progress bar: 80%')
if is_trans_to_tif:
self.api_trans_T3_to_tif(h_a_alpha_out_dir, polsarpro_in_dir)
if is_read_to_dic:
h_a_alpha_features.update(self.api_read_T3_matrix(polsarpro_in_dir))
return h_a_alpha_features
def api_creat_h_a_alpha_features(self, h_a_alpha_out_dir,
h_a_alpha_decomposition_T3_path, h_a_alpha_eigenvalue_set_T3_path,
h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir,is_trans_to_tif=True, is_read_to_dic=False):
"""
对porsarpro格式T3矩阵做Cloude-Pottier分解(h_a_alpha_decompositionh_a_alpha_eigenvalue_set h_a_alpha_eigenvector_set)
:param h_a_alpha_out_dir : 输出h_a_alpha二进制数据的目录
:param h_a_alpha_decomposition_T3_path: haalphadecompositionT3.exe路径
:param h_a_alpha_eigenvalue_set_T3_path: h_a_alpha_eigenvalue_set_T3.exe路径
:param h_a_alpha_eigenvector_set_T3_path: h_a_alpha_eigenvector_set_T3.exe路径
:param polsarpro_in_dir:输入porsarpro格式T3矩阵目录包含.bin,.config
"""
pool = multiprocessing.Pool(processes=3)
pl = []
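        # The three decompositions are independent of each other, so they are dispatched
        # to a pool of three worker processes and joined before collecting their results.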
logger.info("run h_a_alpha_decomposition_T3!")
pl.append(pool.apply_async(self.api_h_a_alpha_decomposition_T3, (h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, is_trans_to_tif, is_read_to_dic, *(1, 1, 1, 1, 1, 1, 1, 1, 1))))
logger.info("run h_a_alpha_eigenvalue_set_T3!")
pl.append(pool.apply_async(self.api_h_a_alpha_eigenvalue_set_T3, (h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, is_trans_to_tif, is_read_to_dic, *(1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1))))
logger.info("run h_a_alpha_eigenvector_set_T3!")
pl.append(pool.apply_async(self.api_h_a_alpha_eigenvector_set_T3, (h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, is_trans_to_tif,is_read_to_dic, *(1, 1, 1, 1, 1))))
pool.close()
pool.join()
logger.info(pl)
logger.info('progress bar: 60%')
        h_a_alpha_features = {}
        # Collect the worker results from the AsyncResult objects: each child process works on a
        # pickled copy of self, so the parent's __res_* attributes would otherwise stay empty.
        h_a_alpha_features.update(pl[0].get())
        logger.info("run h_a_alpha_decomposition_T3 success!")
        h_a_alpha_features.update(pl[1].get())
        logger.info("run h_a_alpha_eigenvalue_set_T3 success!")
        h_a_alpha_features.update(pl[2].get())
        logger.info("run h_a_alpha_eigenvector_set_T3 success!")
if is_trans_to_tif:
self.api_trans_T3_to_tif(h_a_alpha_out_dir, polsarpro_in_dir)
if is_read_to_dic:
h_a_alpha_features.update(self.api_read_T3_matrix(polsarpro_in_dir))
return h_a_alpha_features
def api_h_a_alpha_decomposition_T3(self, h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, is_trans_to_tif=True, is_read_to_dic=False, *args):
"""
对porsarpro格式T3矩阵做Cloude-Pottier分解H-A-Alpha分解
:param h_a_alpha_out_dir : 输出h_a_alpha二进制数据的目录
:param h_a_alpha_decomposition_T3_path: haalphadecompositionT3.exe路径
:param polsarpro_in_dir:输入porsarpro格式T3矩阵目录包含.bin,.config
:param is_trans_to_tif:分解特征是否转换为tif
:param is_read_to_dic:分解特征是否以字典输出
:param *args:9个可选分解特征(alpbetdelgam,Lambda,alpha,entropy,anisotropy,
CombHA,CombH1mA,Comb1mHA,Comb1mH1mA),不输出:0输出:1
:return : 包含分解特征的字典
"""
if not os.path.exists(h_a_alpha_out_dir):
os.makedirs(h_a_alpha_out_dir)
self.__h_a_alpha_decomposition_T3(h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, *args)
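        # File stems that h_a_alpha_decomposition_T3.exe may write; only those actually present
        # on disk are converted to .tif / loaded into the result dict below.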
name_list = ['entropy', 'anisotropy', 'alpha', 'beta', 'delta', 'gamma', 'lambda',
'combination_1mH1mA', 'combination_1mHA', 'combination_H1mA', 'combination_HA']
if is_trans_to_tif:
self.__write_haalpha_to_tif(h_a_alpha_out_dir, h_a_alpha_out_dir, name_list)
if is_read_to_dic:
self.__res_h_a_alpha_decomposition_T3 = self.__read_haalpha(h_a_alpha_out_dir, name_list)
return self.__res_h_a_alpha_decomposition_T3
else:
return {}
def api_h_a_alpha_eigenvalue_set_T3(self, h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, is_trans_to_tif=True, is_read_to_dic=False, *args):
"""
Cloude-Pottier eigenvalue based decomposition of a coherency matrix
:param h_a_alpha_out_dir : Cloude-Pottier eigenvalue
:param h_a_alpha_eigenvalue_set_T3_path: h_a_alpha_eigenvalue_set_T3.exe路径
:param polsarpro_in_dir:输入porsarpro格式T3矩阵目录包含.bin,.config
:param is_trans_to_tif:分解特征是否转换为tif
:param is_read_to_dic:分解特征是否以字典输出
:param *args:9个可选分解特征(alpbetdelgam,Lambda,alpha,entropy,anisotropy,
CombHA,CombH1mA,Comb1mHA,Comb1mH1mA),不输出:0输出:1
:return : 包含分解特征的字典
"""
if not os.path.exists(h_a_alpha_out_dir):
os.makedirs(h_a_alpha_out_dir)
self.__h_a_alpha_eigenvalue_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, *args)
name_list = ['anisotropy', 'anisotropy_lueneburg', 'anisotropy12', 'asymetry', 'derd', 'derd_norm', 'entropy_shannon',
'entropy_shannon_I', 'entropy_shannon_I_norm', 'entropy_shannon_norm', 'entropy_shannon_P',
'entropy_shannon_P_norm', 'l1', 'l2', 'l3', 'p1', 'p2', 'p3', 'pedestal', 'polarisation_fraction',
'rvi', 'serd', 'serd_norm']
if is_trans_to_tif:
self.__write_haalpha_to_tif(h_a_alpha_out_dir, h_a_alpha_out_dir, name_list)
if is_read_to_dic:
self.__res_h_a_alpha_eigenvalue_set_T3 = self.__read_haalpha(h_a_alpha_out_dir, name_list)
return self.__res_h_a_alpha_eigenvalue_set_T3
else:
return {}
def api_h_a_alpha_eigenvector_set_T3(self, h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, is_trans_to_tif=True, is_read_to_dic=False, *args):
"""
Cloude-Pottier eigenvector based decomposition of a coherency matrix
:param h_a_alpha_out_dir : Cloude-Pottier eigenvector
:param h_a_alpha_eigenvector_set_T3_path: h_a_alpha_eigenvector_set_T3.exe路径
:param polsarpro_in_dir:输入porsarpro格式T3矩阵目录包含.bin,.config
:param is_trans_to_tif:分解特征是否转换为tif
:param is_read_to_dic:分解特征是否以字典输出
:param *args:9个可选分解特征(alpbetdelgam,Lambda,alpha,entropy,anisotropy,
CombHA,CombH1mA,Comb1mHA,Comb1mH1mA),不输出:0输出:1
:return : 包含分解特征的字典
"""
if not os.path.exists(h_a_alpha_out_dir):
os.makedirs(h_a_alpha_out_dir)
self.__h_a_alpha_eigenvector_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, *args)
name_list = ['alpha', 'alpha1', 'alpha2', 'alpha3',
'beta', 'beta1', 'beta2', 'beta3',
'delta', 'delta1', 'delta2', 'delta3',
'gamma', 'gamma1', 'gamma2', 'gamma3']
if is_trans_to_tif:
self.__write_haalpha_to_tif(h_a_alpha_out_dir, h_a_alpha_out_dir, name_list)
if is_read_to_dic:
self.__res_h_a_alpha_eigenvector_set_T3 = self.__read_haalpha(h_a_alpha_out_dir, name_list)
return self.__res_h_a_alpha_eigenvector_set_T3
else:
return {}
    def api_read_T3_matrix(self, polsarpro_T3_dir):
        """
        Read the T3 matrix elements into a dict of arrays.
        :param polsarpro_T3_dir: input directory of the PolSARpro-format T3 matrix (.bin and config.txt)
        :return: dict containing the T3 matrix elements
        """
name_list = ['T11', 'T12_imag', 'T12_real',
'T22', 'T13_imag', 'T13_real',
'T33', 'T23_imag', 'T23_real']
return self.__read_haalpha(polsarpro_T3_dir, name_list)
def api_trans_T3_to_tif(self, out_tif_dir, polsarpro_T3_dir):
"""
将T3矩阵从bin格式转换为tif格式
:param out_tif_dir:保存路径
:param polsarpro_in_dir:输入porsarpro格式T3矩阵目录包含.bin,.config
"""
name_list = ['T11', 'T12_imag', 'T12_real',
'T22', 'T13_imag', 'T13_real',
'T33', 'T23_imag', 'T23_real']
self.__write_haalpha_to_tif(out_tif_dir, polsarpro_T3_dir, name_list)
@staticmethod
def __h_a_alpha_decomposition_T3(h_a_alpha_out_dir, h_a_alpha_decomposition_T3_path, polsarpro_in_dir, *args):
"""
对porsarpro格式T3矩阵做Cloude-Pottier分解H-A-Alpha分解
:param h_a_alpha_out_dir : 输出h_a_alpha二进制数据的目录
:param h_a_alpha_decomposition_T3_path: haalphadecompositionT3.exe路径
:param polsarpro_in_dir:输入porsarpro格式T3矩阵目录包含.bin,.config
:param *args:9个可选输出变量(alpbetdelgam,Lambda,alpha,entropy,anisotropy,
CombHA,CombH1mA,Comb1mHA,Comb1mH1mA),不输出:0输出:1
"""
if not os.path.exists(h_a_alpha_decomposition_T3_path):
raise Exception(h_a_alpha_decomposition_T3_path +' is not exists!')
NwinFilter = 1
offsetRow = 0
offsetCol = 0
config_path = os.path.join(polsarpro_in_dir, 'config.txt')
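        # The config.txt written by PolSARpro stores the row count on its second line
        # and the column count on its fifth line (hence the indices 1 and 4 below).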
config = open(config_path, 'r').read().split('\n', -1)
numRow = int(config[1])
numCol = int(config[4])
alpbetdelgam = int(args[0])
Lambda = int(args[1])
alpha = int(args[2])
entropy = int(args[3])
anisotropy = int(args[4])
CombHA = int(args[5])
CombH1mA = int(args[6])
Comb1mHA = int(args[7])
Comb1mH1mA = int(args[8])
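        # Positional arguments expected by the executable: input dir, output dir, filter window size,
        # row/col offsets, image dimensions, followed by the nine output flags.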
para_list = [h_a_alpha_decomposition_T3_path, polsarpro_in_dir, h_a_alpha_out_dir,
str(NwinFilter), str(offsetRow), str(offsetCol), str(numRow), str(numCol),
str(alpbetdelgam), str(Lambda), str(alpha), str(entropy), str(anisotropy),
str(CombHA), str(CombH1mA), str(Comb1mHA), str(Comb1mH1mA)]
cmd = ' '.join(para_list)
result_tuple = subprocess.getstatusoutput(cmd)
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1 or result_tuple[1].find('Could not open') != -1:
raise Exception(result_tuple[1])
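        # Copy config.txt next to the outputs so that later .bin readers can recover the image dimensions.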
shutil.copyfile(config_path, os.path.join(h_a_alpha_out_dir, 'config.txt'))
@staticmethod
def __h_a_alpha_eigenvalue_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, *args):
"""
Cloude-Pottier eigenvalue based decomposition of a coherency matrix
:param h_a_alpha_out_dir : Cloude-Pottier eigenvalue
:param h_a_alpha_eigenvalue_set_T3_path: h_a_alpha_eigenvalue_set_T3.exe路径
:param polsarpro_in_dir:输入porsarpro格式T3矩阵目录包含.bin,.config
:param *args:11个可选输出变量(eigen123,proba123,anisotropy,anisotropy12,asymetry,
polarisation_fraction,erd,rvi,pedestal,shannon,lueneburg),不输出0输出1
"""
if not os.path.exists(h_a_alpha_eigenvalue_set_T3_path):
raise Exception(h_a_alpha_eigenvalue_set_T3_path +' is not exists!')
NwinFilter = 1
offsetRow = 0
offsetCol = 0
config_path = os.path.join(polsarpro_in_dir, 'config.txt')
config = open(config_path, 'r').read().split('\n', -1)
numRow = int(config[1])
numCol = int(config[4])
eigen123 = int(args[0])
proba123 = int(args[1])
anisotropy = int(args[2])
anisotropy12 = int(args[3])
asymetry = int(args[4])
polarisation_fraction = int(args[5])
erd = int(args[6])
rvi = int(args[7])
pedestal = int(args[8])
shannon = int(args[9])
lueneburg = int(args[10])
para_list = [h_a_alpha_eigenvalue_set_T3_path, polsarpro_in_dir, h_a_alpha_out_dir,
str(NwinFilter), str(offsetRow), str(offsetCol), str(numRow), str(numCol),
str(eigen123), str(proba123), str(anisotropy), str(anisotropy12), str(asymetry),
str(polarisation_fraction), str(erd), str(rvi), str(pedestal),
str(shannon), str(lueneburg)]
cmd = ' '.join(para_list)
result_tuple = subprocess.getstatusoutput(cmd)
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1 or result_tuple[1].find('Could not open') != -1:
raise Exception(result_tuple[1])
shutil.copyfile(config_path, os.path.join(h_a_alpha_out_dir, 'config.txt'))
@staticmethod
def __h_a_alpha_eigenvector_set_T3(h_a_alpha_out_dir, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, *args):
"""
Cloude-Pottier eigenvector based decomposition of a coherency matrix
:param h_a_alpha_out_dir : Cloude-Pottier eigenvector
:param h_a_alpha_eigenvector_set_T3_set_T3_path: h_a_alpha_eigenvector_set_T3.exe路径
:param polsarpro_in_dir:输入porsarpro格式T3矩阵目录包含.bin,.config
:param *args:5个可选输出变量(alpha123,beta123,delta123,gamma123,alpbetdelgam),不输出0输出1
"""
if not os.path.exists(h_a_alpha_eigenvector_set_T3_path):
raise Exception(h_a_alpha_eigenvector_set_T3_path +' is not exists!')
NwinFilter = 1
offsetRow = 0
offsetCol = 0
config_path = os.path.join(polsarpro_in_dir, 'config.txt')
config = open(config_path, 'r').read().split('\n', -1)
numRow = int(config[1])
numCol = int(config[4])
alpha123 = int(args[0])
beta123 = int(args[1])
delta123 = int(args[2])
gamma123 = int(args[3])
alpbetdelgam = int(args[4])
para_list = [h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir, h_a_alpha_out_dir,
str(NwinFilter), str(offsetRow), str(offsetCol), str(numRow), str(numCol),
str(alpha123), str(beta123), str(delta123), str(gamma123), str(alpbetdelgam)]
cmd = ' '.join(para_list)
result_tuple = subprocess.getstatusoutput(cmd)
if result_tuple[0] != 1 or result_tuple[1].find('error') != -1 or result_tuple[1].find('Could not open') != -1:
raise Exception(result_tuple[1])
shutil.copyfile(config_path, os.path.join(h_a_alpha_out_dir, 'config.txt'))
def __read_haalpha(self, h_a_alpha_dir, name_list):
"""
读取H-A-Alpha分解二进制数据输出为矩阵格式的字典
:param h_a_alpha_dir : h_a_alpha二进制数据的目录,包含.bin,.config
:name_list : 需要组合的名称集合['entropy', 'anisotropy', 'alpha', 'beta', 'delta', 'gamma', 'lambda',
'combination_1mH1mA', 'combination_1mHA', 'combination_H1mA', 'combination_HA']
:return : 包含H-A-Alpha矩阵信息的字典
"""
dir = os.path.join(h_a_alpha_dir, '*.bin')
bin_paths = list(glob.glob(dir))
haalpha_dic ={}
for name in name_list:
path = os.path.join(h_a_alpha_dir, name + '.bin')
if path in bin_paths:
img = self.__read_bin_to_img(path)
haalpha_dic.update({name: img})
return haalpha_dic
    def standardization(self, data, num=1):
        # Normalize the array to [0, num]; NaN values are first replaced with the finite minimum
        # (np.nanmin is required here, since np.min of an array containing NaN returns NaN).
        data[np.isnan(data)] = np.nanmin(data)
        _range = np.max(data) - np.min(data)
        return (data - np.min(data)) / _range * num
def __write_haalpha_to_tif(self, out_tif_dir, h_a_alpha_dir, name_list):
"""
读取H-A-Alpha分解二进制数据输出为矩阵格式的字典
:param out_tif_dir : tif的输出路径
:param h_a_alpha_dir : h_a_alpha二进制数据的目录,包含.bin,.config
:name_list : 需要组合的名称集合['entropy', 'anisotropy', 'alpha', 'beta', 'delta', 'gamma', 'lambda',
'combination_1mH1mA', 'combination_1mHA', 'combination_H1mA', 'combination_HA']
"""
dir = os.path.join(h_a_alpha_dir, '*.bin')
bin_paths = list(glob.glob(dir))
for name in name_list:
in_path = os.path.join(h_a_alpha_dir, name + '.bin')
out_path = os.path.join(out_tif_dir, name + '.tif')
if in_path in bin_paths:
img_array = self.__read_bin_to_img(in_path)
if self.__normalization is True:
img_array = self.standardization(img_array, num=1)
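                # Image.fromarray on a 2-D float32 array yields a mode 'F' image, which PIL
                # writes out as a single-band 32-bit float TIFF.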
out_image = Image.fromarray(img_array)
out_image.save(out_path)
@staticmethod
def __read_bin_to_img(bin_path):
"""
读取bin格式二进制数据输出为矩阵
:param bin_path : bin文件的路径包含.bin,.config
:return : 矩阵信息
"""
(bin_dir, bin_name) = os.path.split(bin_path)
config_path = os.path.join(bin_dir, 'config.txt')
config = open(config_path, 'r').read().split('\n', -1)
rows = int(config[1])
cols = int(config[4])
        bin_file = open(bin_path, 'rb')  # open the binary file
        size = os.path.getsize(bin_path)  # file size in bytes
        if size < rows * cols * 4:
            raise Exception('bin size is smaller than rows*cols*4! size: {} byte, rows: {}, cols: {}'.format(size, rows, cols))
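        # Each pixel is a 4-byte float32. A vectorized alternative (a sketch, not used here) would be
        # np.fromfile(bin_path, dtype=np.float32, count=rows * cols).reshape(rows, cols),
        # assuming native-endian float32 samples, which matches struct.unpack('f', ...) below.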
        img = np.zeros([rows, cols], dtype=np.float32)
        for row in range(rows):
            data = bin_file.read(4 * cols)  # read one row of binary data at a time
            row_data = struct.unpack('f' * cols, data)  # unpack into a row of float values
            img[row, :] = row_data
bin_file.close()
return img
# if __name__ == '__main__':
# h_a_alpha_decomposition_T3_path = 'D:\\PolSARpro_v4.2.0\\Soft\data_process_sngl\\h_a_alpha_decomposition_T3.exe'
# h_a_alpha_eigenvalue_set_T3_path = 'D:\\PolSARpro_v4.2.0\\Soft\data_process_sngl\\h_a_alpha_eigenvalue_set_T3.exe'
# h_a_alpha_eigenvector_set_T3_path = 'D:\\PolSARpro_v4.2.0\\Soft\data_process_sngl\\h_a_alpha_eigenvector_set_T3.exe'
# polsarpro_in_dir = 'D:\\PolSARpro_v4.2.0\\in'
# haalpha_out_dir = 'D:\\PolSARpro_v4.2.0\\out'
# h_a_alpha_eigenvalue_set_T3_out = 'D:\\PolSARpro_v4.2.0\\out\\h_a_alpha_eigenvalue_set_T3'
# h_a_alpha_eigenvector_set_T3_out = 'D:\\PolSARpro_v4.2.0\\out\\h_a_alpha_eigenvector_set_T3'
#
# haa = PspHAAlphaDecomposition()
# h_a_alpha_features = haa.api_creat_h_a_alpha_features(haalpha_out_dir, h_a_alpha_decomposition_T3_path, h_a_alpha_eigenvalue_set_T3_path, h_a_alpha_eigenvector_set_T3_path, polsarpro_in_dir)
# haa = PspHAAlphaDecomposition(normalization=True)
# psp_path = r"I:\MicroWorkspace\product\C-SAR\SoilSalinity\GF3B_MYC_QPSI_003581_E120.6_N31.3_20220729_L1A_AHV_L10000073024_RPCpsp_t3"
# t3_path = r"I:\MicroWorkspace\product\C-SAR\SoilSalinity\t3"
# exe_dir = r"I:\microproduct\soilSalinity/"
# haa.api_creat_h_a_alpha_features(h_a_alpha_out_dir=t3_path,
# h_a_alpha_decomposition_T3_path= exe_dir + 'h_a_alpha_decomposition_T3.exe',
# h_a_alpha_eigenvalue_set_T3_path= exe_dir + 'h_a_alpha_eigenvalue_set_T3.exe',
# h_a_alpha_eigenvector_set_T3_path=exe_dir +'h_a_alpha_eigenvector_set_T3.exe',
# polsarpro_in_dir=psp_path)
# print('done')
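# A minimal single-process sketch (the paths below are placeholders, not real data):
# if __name__ == '__main__':
#     exe_dir = r"D:\PolSARpro_v4.2.0\Soft\data_process_sngl"
#     haa = PspHAAlphaDecomposition(normalization=True)
#     features = haa.api_creat_h_a_alpha_features_single_process(
#         h_a_alpha_out_dir=r"D:\PolSARpro_v4.2.0\out",
#         h_a_alpha_decomposition_T3_path=os.path.join(exe_dir, 'h_a_alpha_decomposition_T3.exe'),
#         h_a_alpha_eigenvalue_set_T3_path=os.path.join(exe_dir, 'h_a_alpha_eigenvalue_set_T3.exe'),
#         h_a_alpha_eigenvector_set_T3_path=os.path.join(exe_dir, 'h_a_alpha_eigenvector_set_T3.exe'),
#         polsarpro_in_dir=r"D:\PolSARpro_v4.2.0\in",
#         is_trans_to_tif=True, is_read_to_dic=False)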