From 57b2b0413c11b510798acaa8a0278e1eb644e4eb Mon Sep 17 00:00:00 2001
From: tian jiax <446100073@qq.com>
Date: Wed, 30 Oct 2024 11:16:40 +0800
Subject: [PATCH] Code update
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
---
Ortho/OrthoAlg.py | 252 ++++++++++++++++-
Ortho/OrthoMain.py | 259 ++++++++++++++++--
Ortho/Ortho_C_SAR_V3.xml | 8 +-
Ortho/config.ini | 2 +-
backScattering/BackScatteringAlg.py | 38 ++-
backScattering/BackScatteringMain.py | 9 +-
backScattering/BackScattering_C_SAR_V3.xml | 10 +-
dem-C-SAR/Dem.xml | 12 +-
dem-C-SAR/DemMain.py | 3 +-
dem-C-SAR/DemMain.spec | 24 ++
.../components/isceobj/Location/Offset.py | 224 +++++++--------
dem-C-SAR/geocoding.py | 26 +-
dem-sentiral/geocoding.py | 2 +-
landcover_c_sar/LandCoverMain.py | 9 +-
landcover_c_sar/LandCoverMain.spec | 24 ++
landcover_c_sar/LandCover_C_SAR_V3.xml | 8 +-
leafAreaIndex/LeafAreaIndex.xml | 26 +-
leafAreaIndex/LeafAreaIndex_C_SAR_V3.xml | 25 +-
leafAreaIndex/LeafIndexMain.py | 39 +--
leafAreaIndex/LeafIndexXmlInfo.py | 19 +-
leafAreaIndex/config.ini | 2 +-
leafAreaIndex/sample_process.py | 85 +++++-
soilMoistureTop/SoilMoisture.xml | 24 +-
soilMoistureTop/SoilMoistureALg.py | 8 +-
soilMoistureTop/SoilMoistureMain.py | 2 +
soilMoisture_geo_sar/SoilMoistureMain.py | 2 +-
soilMoisture_geo_sar/SoilMoisture_geo.xml | 8 +-
.../SoilSalinityPredict.xml | 6 +-
soilSalinity/SoilSalinity.xml | 12 +-
soilSalinity/SoilSalinityMain.py | 10 +-
surfaceRoughness_oh2004/SurfaceRoughness.xml | 10 +-
.../SurfaceRoughnessTool.py | 8 +-
tool/algorithm/algtools/MetaDataHandler.py | 4 +
tool/algorithm/algtools/PreProcess.py | 22 +-
tool/algorithm/algtools/ROIAlg.py | 15 +-
tool/algorithm/algtools/filter/lee_Filter.py | 15 +
tool/algorithm/image/ImageHandle.py | 14 +-
tool/algorithm/ml/machineLearning.py | 2 +-
tool/algorithm/polsarpro/AHVToPolsarpro.py | 17 +-
.../polsarpro/pspSurfaceInversion.py | 4 +-
tool/algorithm/transforml1a/transHandle.py | 12 +-
vegetationPhenology/SurfaceRoughnessMain.spec | 33 ---
vegetationPhenology/VegetationPhenology.xml | 143 ----------
.../VegetationPhenologyAuxData.py | 37 +--
.../VegetationPhenologyMain.py | 3 +-
.../VegetationPhenology_C_SAR_V3.xml | 184 -------------
vegetationPhenology/testxmlreading.py | 8 +-
47 files changed, 1039 insertions(+), 670 deletions(-)
delete mode 100644 vegetationPhenology/SurfaceRoughnessMain.spec
delete mode 100644 vegetationPhenology/VegetationPhenology.xml
delete mode 100644 vegetationPhenology/VegetationPhenology_C_SAR_V3.xml
diff --git a/Ortho/OrthoAlg.py b/Ortho/OrthoAlg.py
index 911dbaf2..068c5444 100644
--- a/Ortho/OrthoAlg.py
+++ b/Ortho/OrthoAlg.py
@@ -8,6 +8,8 @@
@Date :2021/8/14
@Version :1.0.0
"""
+import glob
+
# from re import I, T, X, match
# from types import DynamicClassAttribute
# from numpy.core.einsumfunc import _parse_possible_contraction
@@ -56,7 +58,7 @@ class ScatteringAlg:
pass
@staticmethod
- def sar_backscattering_coef(in_sar_tif, meta_file_path, out_sar_tif, replece_VV = False, is_DB = True):
+ def sar_backscattering_coef(in_sar_tif, meta_file_path, out_sar_tif, inc_xml_path, replece_VV = False, is_DB = True):
# 读取原始SAR影像
proj, geotrans, in_data = ImageHandler.read_img(in_sar_tif)
@@ -103,6 +105,9 @@ class ScatteringAlg:
coef_arr[where_9999_1] = -9999
## 输出的SAR后向散射系数产品
# ImageHandler.write_img(out_sar_tif, proj, geotrans, coef_arr, 0)
+ _, _, inc_arr = ImageHandler().read_img(inc_xml_path)
+ rad_arr = np.radians(inc_arr)
+ sin_arr = np.sin(rad_arr)
tif_array = np.power(10.0, coef_arr / 10.0) # dB --> 线性值 后向散射系数
@@ -110,6 +115,7 @@ class ScatteringAlg:
tif_array[np.isinf(tif_array)] = 0
tif_array[where_9999_0] = 0
tif_array[where_9999_1] = 0
+ tif_array = tif_array / sin_arr
ImageHandler.write_img(out_sar_tif, proj, geotrans, tif_array, 0)
return True
@@ -122,6 +128,16 @@ class ScatteringAlg:
# db_arr[np.isinf(db_arr)] = -9999
ImageHandler.write_img(db_path, proj, geotrans, db_arr, -9999)
+ @staticmethod
+ def lin_to_db_top(lin_path, db_path):
+ proj, geotrans, in_data = ImageHandler.read_img(lin_path)
+ db_arr = 10 * np.log10(in_data)
+ db_arr[np.isnan(db_arr)] = 0
+ db_arr[np.isinf(db_arr)] = 0
+ # db_arr[np.isnan(db_arr)] = -9999
+ # db_arr[np.isinf(db_arr)] = -9999
+ ImageHandler.write_img(db_path, proj, geotrans, db_arr, '0')
+
@staticmethod
def sar_backscattering_coef_RPC(in_sar_tif, meta_file_path, out_sar_tif, replece_VV=False, is_DB=True):
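The new lin_to_db_top helper is the reverse step, 10*log10 with non-finite results set to 0 instead of -9999. A tiny stand-alone illustration (example array only, no ImageHandler I/O):

import numpy as np

lin = np.array([0.0, 0.05, 0.2, np.nan])   # linear backscatter, example values
db = 10.0 * np.log10(lin)                  # 0.0 -> -inf, nan stays nan
db[~np.isfinite(db)] = 0                   # zero out nan/inf, matching lin_to_db_top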
@@ -174,6 +190,17 @@ class ScatteringAlg:
return True
+ @staticmethod
+ def get_incidence_xml_paths(file_dir, name):
+ meta_xml_paths = []
+ if os.path.exists(file_dir + name + '\\'):
+ meta_xml_paths = list(glob.glob(os.path.join(file_dir + name, '*.incidence.xml')))
+ else:
+ meta_xml_paths = list(glob.glob(os.path.join(file_dir, '*.incidence.xml')))
+ if meta_xml_paths is None or meta_xml_paths == []:
+ raise Exception('there is not .incidence.xml in path: ', file_dir + '\\')
+ return meta_xml_paths
+
def FindInfomationFromJson(HeaderFile_dom_json, node_path_list):
@@ -1086,6 +1113,38 @@ class DEMProcess(object):
# gdal.CloseDir(out_DEM)
return out_DEM
+ @staticmethod
+ def tif_merged(in_tif_path, temp_dir, out_tif_path):
+        '''
+        Mosaic the strip images in a folder into one WGS84 image
+        args:
+            in_tif_path: folder containing the input strip tifs
+            temp_dir: folder for the intermediate VRT file
+            out_tif_path: output folder
+        '''
+        # collect all .tif files in the folder
+ bsMap_file_paths = [os.path.join(in_tif_path, dem_name) for dem_name in os.listdir(in_tif_path) if
+ dem_name.find(".tif") >= 0 and dem_name.find(".tif.") == -1]
+ spatialreference = osr.SpatialReference()
+ spatialreference.SetWellKnownGeogCS("WGS84") # 设置地理坐标,单位为度 degree # 设置投影坐标,单位为度 degree
+ spatialproj = spatialreference.ExportToWkt() # 导出投影结果
+
+ # # 将DEM拼接成一张大图
+ baseName = os.path.splitext(os.path.basename(bsMap_file_paths[0]))[0]
+ mergeFile = gdal.BuildVRT(os.path.join(temp_dir, f"{baseName}_VRT.tif"), bsMap_file_paths)
+ out_DEM = os.path.join(out_tif_path, f"{baseName.split('_Strip')[0]}-ortho.tif")
+ gdal.Warp(out_DEM,
+ mergeFile,
+ format="GTiff",
+ dstSRS=spatialproj,
+ srcNodata=0,
+ dstNodata=-9999,
+ outputType=gdal.GDT_Float32,
+ resampleAlg='average')
+ time.sleep(3)
+ # gdal.CloseDir(out_DEM)
+ return out_DEM
+
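A usage sketch for the new tif_merged; the paths are hypothetical and the import assumes OrthoAlg.py is importable as a module:

from OrthoAlg import DEMProcess

strips_dir = r"D:\work\temp\incAngle"      # folder holding *_Strip_N.tif files (hypothetical)
merged = DEMProcess.tif_merged(strips_dir, r"D:\work\temp", r"D:\work\package")
# output name: basename of the first strip up to '_Strip', plus '-ortho.tif'; nodata 0 -> -9999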
@staticmethod
def dem_resampled(in_dem_path,out_dem_path,samling_f):
in_dem=gdal.Open(in_dem_path,gdalconst.GA_ReadOnly)
@@ -1280,6 +1339,144 @@ class Orthorectification(object):
return HeaderInformation_json
pass
+ def ParseHearderFile_top(self, HeaderFilePath_str, pol_id):
+ """
+ 解析头文件,返回计算所需要的必要信息。
+ args:
+ HeaderFilePath_str: 头文件地址
+ return:
+ HeaderFileInfomation_json: 头文件信息包
+ """
+
+ with open(HeaderFilePath_str, 'r', encoding='utf-8') as fp:
+ HeaderFile_dom_str = fp.read()
+ HeaderFile_dom = xmltodict.parse(HeaderFile_dom_str) # 将XML转成json文本
+ HeaderFile_dom_json = json.loads(json.dumps(HeaderFile_dom)) # 将json字符串,转成json对象(对应于python中的dict)
+ # 获取头文件
+ HeaderInformation_json = {}
+ # 解析轨道节点,假定是GPS节点
+ HeaderInformation_json['GPS'] = []
+ # GPS 解析信息
+ GPSNode_Path = self.config['GPS']['GPSNode_Path']
+ GPSNodeNames_list = self.config['GPS']['NodeInfomation_Name']
+ GPSTime_Format = self.config['GPS']['Time_format']
+ GPSPoints = FindInfomationFromJson(HeaderFile_dom_json, GPSNode_Path)
+
+ for GPSPoint in GPSPoints:
+ GPSPoint = [
+ datetime.datetime.strptime(GPSPoint[GPSNodeNames_list[0]], GPSTime_Format).timestamp(), # TimeStamp
+ float(GPSPoint[GPSNodeNames_list[1]]), # Xp
+ float(GPSPoint[GPSNodeNames_list[2]]), # Yp
+ float(GPSPoint[GPSNodeNames_list[3]]), # Zp
+ float(GPSPoint[GPSNodeNames_list[4]]), # Vx
+ float(GPSPoint[GPSNodeNames_list[5]]), # Vy
+ float(GPSPoint[GPSNodeNames_list[6]])] # VZ
+ HeaderInformation_json['GPS'].append(GPSPoint)
+
+ # 提取成像相关信息
+ HeaderInformation_json['ImageInformation'] = {}
+ # 1、开始成像时间
+ HeaderInformation_json['ImageInformation']['StartTime'] = datetime.datetime.strptime(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['StartImageTime']['NodePath']),
+ self.config['imageinfo']['StartImageTime']['Format']
+ ).timestamp()
+ # 2、结束成像时间
+ HeaderInformation_json['ImageInformation']['EndTime'] = datetime.datetime.strptime(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['EndImageTime']['NodePath']),
+ self.config['imageinfo']['StartImageTime']['Format']
+ ).timestamp()
+ # 3、影像的宽高
+ temp = FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['ImageHeight']['NodePath'])
+ HeaderInformation_json['ImageInformation']['height'] = int(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['ImageHeight']['NodePath']).split(',')[
+ pol_id])
+ HeaderInformation_json['ImageInformation']['width'] = int(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['ImageWidth']['NodePath']).split(',')[
+ pol_id])
+ # 4、影像的近斜距
+ HeaderInformation_json['ImageInformation']['NearRange'] = float(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['NearRange']['NodePath']).split(',')[
+ pol_id])
+ # 5、入射角
+ HeaderInformation_json['ImageInformation']['incidenceAngle'] = {
+ 'NearRange': float(FindInfomationFromJson(HeaderFile_dom_json,
+ self.config['imageinfo']['incidenceAngle']['NearRange'][
+ 'NodePath'])),
+ 'FarRange': float(FindInfomationFromJson(HeaderFile_dom_json,
+ self.config['imageinfo']['incidenceAngle']['FarRange'][
+ 'NodePath']))
+ }
+ # 6、成像时刻影像带宽
+ bandWidth_list = self.config['sensor']['bandWidth']['NodePath']
+ bandWidth_list.insert(4, pol_id)
+ HeaderInformation_json['ImageInformation']['bandWidth'] = float(
+ FindInfomationFromJson(HeaderFile_dom_json, bandWidth_list))
+ # 7、多普勒质心常数
+ DopplerCentroidCoefficients_list = FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo'][
+ 'DopplerCentroidCoefficients']['NodePath'])
+ HeaderInformation_json['ImageInformation']['DopplerCentroidCoefficients'] = [
+ float(DopplerCentroidCoefficients_list[
+ self.config['imageinfo']['DopplerCentroidCoefficients']['DopplerCentroidCoefficients_Name'][0]]),
+ float(DopplerCentroidCoefficients_list[
+ self.config['imageinfo']['DopplerCentroidCoefficients']['DopplerCentroidCoefficients_Name'][1]]),
+ float(DopplerCentroidCoefficients_list[
+ self.config['imageinfo']['DopplerCentroidCoefficients']['DopplerCentroidCoefficients_Name'][2]]),
+ float(DopplerCentroidCoefficients_list[
+ self.config['imageinfo']['DopplerCentroidCoefficients']['DopplerCentroidCoefficients_Name'][3]]),
+ float(DopplerCentroidCoefficients_list[
+ self.config['imageinfo']['DopplerCentroidCoefficients']['DopplerCentroidCoefficients_Name'][4]]),
+ ]
+ # 8、波长
+ HeaderInformation_json['ImageInformation']['lambda'] = float(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['sensor']['lambda']['NodePath']))
+ # 9、中心像元对应的中心经纬度
+ CenterPoint = FindInfomationFromJson(HeaderFile_dom_json,
+ self.config['imageinfo']['CenterImagePositon']['NodePath'])
+ HeaderInformation_json['ImageInformation']['centerPosition'] = [
+ float(CenterPoint[self.config['imageinfo']['CenterImagePositon']['Value'][0]]), # 纬度
+ float(CenterPoint[self.config['imageinfo']['CenterImagePositon']['Value'][1]]), # 经度
+ ]
+ # 10、多普勒参数参考时间
+ HeaderInformation_json['ImageInformation']['DopplerParametersReferenceTime'] = float(
+ FindInfomationFromJson(HeaderFile_dom_json,
+ self.config['imageinfo']['DopplerParametersReferenceTime']['NodePath']))
+ # 11、参考斜距
+ HeaderInformation_json['ImageInformation']['refRange'] = float(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['ReferenceRange']['NodePath']).split(
+ ',')[pol_id])
+ # 12、PRF
+ HeaderInformation_json['PRF'] = float(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['sensor']['PRF']['NodePath']).split(',')[pol_id])
+ HeaderInformation_json['Fs'] = float(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['sensor']['Fs']['NodePath']).split(',')[pol_id])
+ # 13、中心时间
+ HeaderInformation_json['ImageInformation']['CenterTime'] = datetime.datetime.strptime(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['CenterImageTime']['NodePath']),
+ self.config['imageinfo']['CenterImageTime']['Format']
+ ).timestamp()
+ # 14. 影像的空间间隔分辨率
+ HeaderInformation_json['ImageInformation']['ImageWidthSpace'] = float(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['ImageWidthSpace']['NodePath']).split(
+ ',')[pol_id])
+
+ HeaderInformation_json['ImageInformation']['ImageHeightSpace'] = float(
+ FindInfomationFromJson(HeaderFile_dom_json, self.config['imageinfo']['ImageHeightSpace']['NodePath']).split(
+ ',')[pol_id])
+
+ # 部分需要解析
+ self.lamda = HeaderInformation_json['ImageInformation']['lambda']
+ self.PRF = HeaderInformation_json['PRF']
+ self.imgwidth = HeaderInformation_json['ImageInformation']['width']
+ self.imgheight = HeaderInformation_json['ImageInformation']['height']
+ self.imgstarttime = HeaderInformation_json['ImageInformation']['StartTime']
+ self.widthspace = HeaderInformation_json['ImageInformation']['ImageWidthSpace']
+ self.heightspace = HeaderInformation_json['ImageInformation']['ImageHeightSpace']
+ self.refrange = HeaderInformation_json['ImageInformation']['refRange']
+ self.nearrange = HeaderInformation_json['ImageInformation']['NearRange']
+        self.Fs = HeaderInformation_json['Fs'] * 1e6  # MHz -> Hz
+ return HeaderInformation_json
+ pass
+
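The main difference from the original parser is that ParseHearderFile_top treats width/height, near range, PRF, Fs and the pixel spacings as comma-separated per-strip values indexed by pol_id. A toy illustration with a hypothetical node value:

pol_id = 2                                   # strip/polarisation index parsed from the file name
image_width_node = "8192,8192,4096"          # hypothetical comma-separated node text
width = int(image_width_node.split(',')[pol_id])   # -> 4096, same pattern as the calls above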
def LLA_to_XYZ(self, latitude, longitude, altitude):
"""
经纬度转大地坐标系
@@ -1617,6 +1814,45 @@ class IndirectOrthorectification(Orthorectification):
self.PrepareConvertSystem()
self.outParameterText(os.path.join(workspace_dir,"orth_para.txt"))
+
+ def IndirectOrthorectification_top(self, xml_path, tif_path, workspace_dir):
+ """
+ 正射校正组件
+ 正射校正整体可以分为两个步骤:
+ Step 1:计算出栅格坐标对应的真实的大地坐标系坐标
+ Step 2:根据大地坐标系将影像重采样到大地坐标系空间中
+        args:
+            xml_path: path of the scene metadata XML
+            tif_path: path of the strip image to be corrected
+            workspace_dir: working directory for intermediate and parameter files
+ """
+ # 分离需要校正对象
+ pol_id = os.path.splitext(os.path.basename(tif_path))[0].split('_')[-1]
+ # 解析头文件
+ self.header_info = self.ParseHearderFile_top(xml_path, int(pol_id))
+ # 构建轨道模型
+ self.SatelliteOrbitModel = ReconstructionSatelliteOrbit(self.header_info['GPS'],
+ starttime=self.header_info['ImageInformation'][
+ 'CenterTime']) # 构建卫星轨道模型,取第0个节点的时间
+ # 获取所有轨道节点时间
+ gpslist = np.array(self.header_info['GPS']).reshape(-1, 7)
+ # 验证轨道模型
+ tempp = gpslist[:, 0].reshape(-1)
+ statepoints = self.SatelliteOrbitModel.getSatelliteSpaceState(gpslist[:, 0].reshape(-1))
+ # 计算轨道差距
+ statepoints_dist = statepoints - gpslist[:, 1:]
+ statepoints_dist_line = statepoints_dist[:, :3]
+ statepoints_dist_line = np.sum(statepoints_dist_line ** 2, axis=1) ** 0.5
+
+ statepoints_dist_ver = np.sum(statepoints_dist[:, 3:] ** 2, axis=1) ** 0.5
+ print(
+            "state point distance:\t{}\t -\t{}\t ".format(np.min(statepoints_dist_line), np.max(statepoints_dist_line)))
+
+ # 求解模型前计算常数变换模型
+ self.PrepareConvertSystem()
+ self.outParameterText(os.path.join(workspace_dir, "orth_para.txt"))
+
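The orbit check above re-evaluates the fitted model at the GPS node epochs and prints the range of position residuals. A self-contained sketch of that residual computation, using synthetic state vectors:

import numpy as np

gps = np.random.rand(10, 7)                             # columns: t, X, Y, Z, Vx, Vy, Vz (synthetic)
model = gps[:, 1:] + 1e-3 * np.random.randn(10, 6)      # stand-in for getSatelliteSpaceState output
diff = model - gps[:, 1:]
pos_res = np.sqrt(np.sum(diff[:, :3] ** 2, axis=1))     # position residual per node
vel_res = np.sqrt(np.sum(diff[:, 3:] ** 2, axis=1))     # velocity residual per node
print(pos_res.min(), pos_res.max())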
def preCaldem_sar_rc(self,dem_path,ori_sar_path,work_path,taget_path):
#.\SIMOrthoProgram\x64\Release\SIMOrthoProgram.exe 1 parameter_path dem_path ori_sar_path work_path taget_path
# SIMOrthoProgram.exe 1 in_parameter_path in_dem_path in_ori_sar_path in_work_path in_taget_path
@@ -3026,6 +3262,14 @@ class IndirectOrthorectification(Orthorectification):
'''
if __name__ == '__main__':
- file_path = r"D:\BaiduNetdiskDownload\S1\20240726\ortho.tif"
- out_path = r"D:\BaiduNetdiskDownload\S1\20240726\ortho_register.tif"
- ScatteringAlg().lin_to_db(file_path, out_path)
+ # file_path = r"D:\BaiduNetdiskDownload\S1\20240726\ortho.tif"
+ # out_path = r"D:\BaiduNetdiskDownload\S1\20240726\ortho_register.tif"
+ # ScatteringAlg().lin_to_db(file_path, out_path)
+    # positive infinity (1.0 / 0.0 would raise ZeroDivisionError in plain Python)
+    positive_infinity = float('inf')
+    print(positive_infinity)  # prints: inf
+
+    # negative infinity
+    negative_infinity = float('-inf')
+    print(negative_infinity)  # prints: -inf
+
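Background on the change above: in plain Python, 1.0 / 0.0 raises ZeroDivisionError rather than returning inf; only numpy division (or float('inf')) produces infinities:

import numpy as np
print(float('inf'))                        # inf
print(np.float64(1.0) / np.float64(0.0))   # inf, with a RuntimeWarning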
diff --git a/Ortho/OrthoMain.py b/Ortho/OrthoMain.py
index 8b07528a..28a997c9 100644
--- a/Ortho/OrthoMain.py
+++ b/Ortho/OrthoMain.py
@@ -38,7 +38,7 @@ else:
DEBUG = False
EXE_NAME = cf.get('exe_name')
productLevel = cf.get('productLEVEL')
-tager = '-' + cf.get('tager')
+tager = '-' + cf.get('target')
#env_str = os.getcwd()
env_str =os.path.dirname(os.path.abspath(sys.argv[0])) #os.path.split(os.path.realpath(__file__))[0]
os.environ['PROJ_LIB'] = env_str
@@ -151,10 +151,12 @@ class OrthoMain:
self.__input_paras = self.__alg_xml_handler.get_input_paras()
# self.__output_paras = self.__alg_xml_handler.get_output_paras()
self.__create_work_space()
+ self.__out_name = os.path.splitext(os.path.splitext(os.path.basename(self.__input_paras['SLC']['ParaValue']))[0])[0]
+
self.__in_processing_paras = self.__init_processing_paras(self.__input_paras) # 输入{paraname:paravalue}
# self.__out_processing_paras = self.__init_processing_paras(self.__output_paras) # 输入{paraname:paravalue}
- self.__out_name = os.path.splitext(os.path.splitext(os.path.basename(self.__input_paras['SLC']['ParaValue']))[0])[0]
+
# AlgorithmName = self.__alg_xml_handler.get_algorithm_name()
# TaskId = self.__alg_xml_handler.get_task_id()
result_name = self.__out_name + ".tar.gz"
@@ -290,8 +292,11 @@ class OrthoMain:
processing_paras.update({name: para_path})
if para['DataType'] == 'tar.gz':
para_path = os.path.join(self.__workspace_path, para['ParaValue'])
- tar_gz_dic = self.__dec_tar_gz(name, para_path, self.__workspace_unpack_path)
- processing_paras.update(tar_gz_dic)
+ if 'TOPN' in self.__out_name:
+                    tar_gz_dic = self.__TOP_dec_tar_gz(name, para_path, self.__workspace_unpack_path)
+                    processing_paras.update(tar_gz_dic)
+ else:
+ tar_gz_dic = self.__dec_tar_gz(name, para_path, self.__workspace_unpack_path)
+ processing_paras.update(tar_gz_dic)
if para['DataType'] == 'tif' or para['DataType'] == 'tiff': # 新增修改dem数据为文件绝对路径
if para['ParaValue'] != 'empty' and para['ParaValue'] != 'Empty' and para['ParaValue'] != '':
para_path_list = para['ParaValue'].split(";")
@@ -323,6 +328,69 @@ class OrthoMain:
processing_paras.update({name: value})
return processing_paras
+ def __TOP_dec_tar_gz(self, name1, tar_gz_path, out_dir):
+ """
+ 解压.tar_gz格式景影像文件
+ :param tar_gz_path:.tar_gz文件路径
+ :param out_dir:输出文件夹
+ :return para_dic:全极化影像路径
+ """
+ # 创建文件夹
+ name = os.path.split(tar_gz_path)[1].rstrip('.tar.gz')
+ file_dir = os.path.join(out_dir, name + '\\')
+ if os.path.exists(file_dir) is False:
+ os.makedirs(file_dir)
+ # 解压
+ t = tarfile.open(tar_gz_path)
+ t.extractall(path=file_dir)
+
+ para_dic = {}
+
+ tif_files = list(glob.glob(os.path.join(file_dir, '*.tiff')))
+ tif_files += list(glob.glob(os.path.join(file_dir, '*.tif')))
+ hh_list, hv_list, vh_list, vv_list, dh_list = [], [], [], [], []
+ for in_tif_path in tif_files:
+ file_baseName = os.path.splitext(os.path.basename(in_tif_path))[0]
+
+ if 'hh' in file_baseName or 'HH' in file_baseName:
+ hh_list.append(in_tif_path)
+ self.polsar_list.append('HH')
+ elif 'hv' in file_baseName or 'HV' in file_baseName:
+ hv_list.append(in_tif_path)
+ self.polsar_list.append('HV')
+ elif 'vh' in file_baseName or 'VH' in file_baseName:
+ vh_list.append(in_tif_path)
+ self.polsar_list.append('VH')
+ elif 'vv' in file_baseName or 'VV' in file_baseName:
+ vv_list.append(in_tif_path)
+ self.polsar_list.append('VV')
+ elif "DH" in os.path.basename(in_tif_path):
+ dh_list.append(in_tif_path)
+ self.polsar_list.append('DH')
+ self.polsar_list = list(set(self.polsar_list))
+ para_dic.update({'HH': hh_list})
+ para_dic.update({'HV': hv_list})
+ para_dic.update({'VH': vh_list})
+ para_dic.update({'VV': vv_list})
+ para_dic.update({'DH': dh_list})
+
+ # 获取文件夹内的文件
+
+ if os.path.exists(file_dir + name + '\\'):
+ meta_xml_paths = list(glob.glob(os.path.join(file_dir + name, '*.meta.xml')))
+ para_dic.update({'SLC': file_dir + name})
+ else:
+ meta_xml_paths = list(glob.glob(os.path.join(file_dir, '*.meta.xml')))
+ para_dic.update({'SLC': file_dir})
+
+ if meta_xml_paths == []:
+ raise Exception('there is not .meta.xml in path: ', file_dir + '\\')
+ para_dic.update({'META': meta_xml_paths[0]})
+ self.image_meta_xml = meta_xml_paths
+ # para_dic.update({name1: file_dir}) # {SLC: file_path}
+
+ return para_dic
+
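For a dual-pol TOPN scene, the returned dictionary would look roughly like the following; file names and paths are hypothetical:

para_dic = {
    'HH': [r'...\GF3_..._TOPN_..._HH_1.tiff', r'...\GF3_..._TOPN_..._HH_2.tiff'],
    'VV': [r'...\GF3_..._TOPN_..._VV_1.tiff', r'...\GF3_..._TOPN_..._VV_2.tiff'],
    'HV': [], 'VH': [], 'DH': [],
    'SLC': r'...\unpack\scene_dir',
    'META': r'...\unpack\scene_dir\GF3_..._TOPN_....meta.xml',
}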
def __dec_tar_gz(self, name1, tar_gz_path, out_dir):
"""
解压.tar_gz格式景影像文件
@@ -351,6 +419,7 @@ class OrthoMain:
if meta_xml_paths == []:
raise Exception('there is not .meta.xml in path: ', file_dir + '\\')
para_dic.update({'META': meta_xml_paths[0]})
+ para_dic.update({'incXML': alg.get_incidence_xml_paths(file_dir, name)[0]})
self.image_meta_xml = meta_xml_paths
# para_dic.update({name1: file_dir}) # {SLC: file_path}
@@ -390,8 +459,10 @@ class OrthoMain:
elif CorrectMethod.get('CorrectMethod') == '2' or CorrectMethod.get('CorrectMethod') == 2:
logger.info("CorrectMethod is RD!")
-
- return self.RD_process_handle()
+ if 'TOPN' in self.__out_name:
+ return self.RD_TOP_process_handle()
+ else:
+ return self.RD_process_handle()
else:
raise Exception('No CorrectMethod')
@@ -595,11 +666,22 @@ class OrthoMain:
ortho_bsMap_path = self.__workspace_baseMap_path
bsMap_merged_path = DEMProcess.bsMap_merged(bsMap, meta_file_path, ortho_bsMap_path)
+
dem_path = self.cut_dem(dem_merged_path, meta_file_path)
# 2、间接定位法求解行列坐标
slc_paths = self.__in_processing_paras["SLC"]
+
+ in_slc_path = None
+ for slc_path in os.listdir(slc_paths):
+ if slc_path.find(".tiff") > 0 and (
+ slc_path.find("_HH_") > 0 or slc_path.find("_VV_") > 0 or slc_path.find("_DH_") > 0):
+ in_slc_path = os.path.join(slc_paths, slc_path)
+ break
+ inc_xml_path = self.__in_processing_paras['incXML']
+ rows = self.imageHandler.get_img_height(in_slc_path)
+ inc_path = os.path.join(self.__workspace_Temporary_path, 'inc_angle.tif')
+ ImageHandler.get_inc_angle(inc_xml_path, rows, 1, inc_path)
# 2.1 生成映射表
- slc_path=os.path.join(slc_paths,os.listdir(slc_paths)[0])
# 2.2 生成局地入射角
path2 = env_str
@@ -608,11 +690,7 @@ class OrthoMain:
# 2.3 输出结果
# 3 处理RD
- in_slc_path=None
- for slc_path in os.listdir(slc_paths):
- if slc_path.find(".tiff")>0 and (slc_path.find("_HH_")>0 or slc_path.find("_VV_")>0 or slc_path.find("_DH_")>0):
- in_slc_path=os.path.join(slc_paths,slc_path)
- break
+
# 获取校正模型后
out_dir_path = self.__workspace_package_path.replace("\\", "\\\\")
Orthorectification.preCaldem_sar_rc(dem_path,in_slc_path,self.__workspace_Temporary_path,self.__workspace_package_path.replace("\\","\\\\")) # 初步筛选坐标范围
@@ -675,10 +753,6 @@ class OrthoMain:
GTC_rc_path=os.path.join(self.__workspace_package_path,"ori_sim-ortho.tif")
GTC_out_path=self.__workspace_package_path
-
- # dem_rc = os.path.join(self.__workspace_package_path, "sim_ori-ortho.tif")
- # dem_rc_pro = self.process_sim_ori(this_in_rpc_lon_lat_path, dem_rc)
- # shutil.move(dem_rc_pro, dem_rc)
parameter_path = os.path.join(self.__workspace_package_path, "orth_para.txt")
in_tif_paths = list(glob.glob(os.path.join(slc_paths, '*.tiff')))
for in_tif_path in in_tif_paths:
@@ -686,7 +760,7 @@ class OrthoMain:
slc_path_temp=os.path.join(slc_paths,in_tif_path)
out_power_path=os.path.join(self.__workspace_Temporary_path,slc_path_temp.replace(".tiff","-lin.tif").replace("L1A","L1B")).replace("HH","h_h").replace("HV","h_v").replace("VH","v_h").replace("VV","v_v").replace("DH","d_h")
# out_power_path=os.path.join(self.__workspace_Temporary_path,slc_path_temp.replace(".tiff","_db.tif"))
- alg.sar_backscattering_coef(slc_path_temp,self.__in_processing_paras['META'],out_power_path)
+ alg.sar_backscattering_coef(slc_path_temp,self.__in_processing_paras['META'],out_power_path, inc_path)
lin_tif_path = os.path.join(self.__workspace_Temporary_path,
os.path.basename(out_power_path).split('-')[0] + "-lin_geo.tif")
@@ -760,6 +834,157 @@ class OrthoMain:
logger.info('progress bar :100%')
return True
pass
+
+ def RD_TOP_process_handle(self):
+ # RPC
+ logger.info(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'))
+ # print(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'))
+ # 1、DEM拼接、裁剪、重采样
+ Orth_Slc = []
+ in_dem_path = self.__in_processing_paras['DEM']
+ meta_file_path = self.__in_processing_paras['META'] # .meta文件路径
+ out_dem_path = self.__workspace_ResampledDEM_path
+ dem_merged_path = DEMProcess.dem_merged(in_dem_path, meta_file_path,
+ out_dem_path) # 生成TestDEM\mergedDEM_VRT.tif
+
+ # bsMap = self.__in_processing_paras['baseMap']
+ # ortho_bsMap_path = self.__workspace_baseMap_path
+ # bsMap_merged_path = DEMProcess.bsMap_merged(bsMap, meta_file_path, ortho_bsMap_path)
+
+ dem_path = self.cut_dem(dem_merged_path, meta_file_path)
+ # 2、间接定位法求解行列坐标
+ sim_ori_map = {}
+ patamter_map = {}
+ inc_angle_dir = os.path.join(self.__workspace_Temporary_path, f"incAngle")
+ localincidentAngle_dir = os.path.join(self.__workspace_Temporary_path, f"localincidentAngle")
+ if not os.path.exists(inc_angle_dir):
+ os.makedirs(inc_angle_dir)
+ if not os.path.exists(localincidentAngle_dir):
+ os.makedirs(localincidentAngle_dir)
+ slc_paths = self.__in_processing_paras[self.polsar_list[0]]
+ for slc_path in slc_paths:
+ baseName = os.path.splitext(os.path.basename(slc_path))[0]
+
+ temp_dir = os.path.join(self.__workspace_Temporary_path, baseName)
+ if not os.path.exists(temp_dir):
+ os.makedirs(temp_dir)
+
+ path2 = env_str
+ Orthorectification = IndirectOrthorectification(os.path.join(path2, "config.yaml"))
+
+ Orthorectification.IndirectOrthorectification_top(self.__in_processing_paras["META"], slc_path,
+ temp_dir) # 改动1
+
+ Orthorectification.preCaldem_sar_rc(dem_path, slc_path, self.__workspace_Temporary_path, temp_dir)
+
+ strip_id = baseName.split('_')[-1]
+ RD_sim_ori = os.path.join(temp_dir, 'RD_sim_ori.tif')
+ sim_ori_ortho = os.path.join(self.__workspace_package_path, f"sim_ori_strip_{strip_id}-ortho.tif")
+ shutil.move(RD_sim_ori, sim_ori_ortho)
+ sim_ori_map.update({'sim_ori_strip_' + strip_id: sim_ori_ortho})
+
+ paramter_path = os.path.join(temp_dir, 'orth_para.txt')
+ patamter_map.update({'paramter_strip_' + strip_id: paramter_path})
+
+ inc_angle_strip = os.path.join(temp_dir, 'incidentAngle.tiff')
+ inc_angle_merged = os.path.join(inc_angle_dir, f"incidentAngle_Strip_{strip_id}.tif")
+ shutil.move(inc_angle_strip, inc_angle_merged)
+
+ localincidentAngle_strip = os.path.join(temp_dir, 'localincidentAngle.tiff')
+ localincidentAngle_merged = os.path.join(localincidentAngle_dir, f"localincidentAngle_Strip_{strip_id}.tif")
+ shutil.move(localincidentAngle_strip, localincidentAngle_merged)
+
+ DEMProcess.tif_merged(inc_angle_dir, self.__workspace_origin_path, self.__workspace_package_path)
+ DEMProcess.tif_merged(localincidentAngle_dir, self.__workspace_origin_path, self.__workspace_package_path)
+
+ tif_db_dir_list = []
+ for polr in self.polsar_list:
+ tifFiles = self.__in_processing_paras[polr]
+ tif_db_dir = os.path.join(self.__workspace_Temporary_path, f"{polr}_db")
+ if not os.path.exists(tif_db_dir):
+ os.makedirs(tif_db_dir)
+ tif_db_dir_list.append(tif_db_dir)
+ for tif in tifFiles:
+ baseName = os.path.splitext(os.path.basename(tif))[0]
+ strip_id = baseName.split('_')[-1]
+ inc_xml_path = os.path.join(os.path.dirname(slc_path), baseName + '.incidence.xml')
+ inc_path = os.path.join(self.__workspace_Temporary_path, baseName + '_angle.tif')
+ rows = self.imageHandler.get_img_height(slc_path)
+ ImageHandler.get_inc_angle(inc_xml_path, rows, 1, inc_path)
+
+ out_power_path = os.path.join(self.__workspace_Temporary_path,
+ os.path.basename(tif).replace(".tiff", "-lin.tif").replace("L1A",
+ "L1B")).replace(
+ "HH", "h_h").replace("HV", "h_v").replace("VH", "v_h").replace("VV", "v_v").replace("DH", "d_h")
+
+ alg.sar_backscattering_coef(tif, self.__in_processing_paras['META'], out_power_path, inc_path)
+
+ lin_tif_path = os.path.join(self.__workspace_Temporary_path,
+ os.path.basename(out_power_path).split('-')[0] + "-lin_geo.tif")
+
+ Orthorectification.calInterpolation_bil_Wgs84_rc_sar_sigma(patamter_map[f"paramter_strip_{strip_id}"],
+ sim_ori_map[f"sim_ori_strip_{strip_id}"],
+ out_power_path,
+ lin_tif_path)
+ tempout_tif_path = os.path.join(tif_db_dir,
+ os.path.basename(lin_tif_path).split('-')[0] + '.tif')
+ alg.lin_to_db_top(lin_tif_path, tempout_tif_path) # 线性值转回DB值
+
+ for tif_merged in tif_db_dir_list:
+ DEMProcess.tif_merged(tif_merged, self.__workspace_origin_path, self.__workspace_package_path)
+
+ for tiff_name in os.listdir(self.__workspace_package_path):
+ if (tiff_name.find(".tiff") > 0 or tiff_name.find(".tif") > 0) and 'sim_ori' not in tiff_name:
+ self.imageHandler.write_quick_view(os.path.join(self.__workspace_package_path, tiff_name))
+
+
+ tem_folder = self.__workspace_path + EXE_NAME + r"\Temporary""\\"
+ image_path = tempout_tif_path # os.path.join(self.__workspace_package_path, "OrthoMapTable.tif")
+ out_path1 = os.path.join(tem_folder, "trans_geo_projcs.tif")
+ out_path2 = os.path.join(tem_folder, "trans_projcs_geo.tif")
+ model_path = "./product.xml"
+ meta_xml_path = os.path.join(self.__workspace_package_path,
+ os.path.basename(self.__out_para).replace(".tar.gz", ".meta.xml"))
+
+ para_dict = CreateMetaDict(image_path, self.__in_processing_paras['META'], self.__workspace_package_path,
+ out_path1, out_path2).calu_nature()
+
+ Azimuth = os.path.join(self.__workspace_Temporary_path, 'Azimuth.txt')
+ if os.path.exists(Azimuth):
+ Azimuth_incidence = OrthoAzimuth.get_Azimuth_incidence(Azimuth)
+ else:
+ logger.error("read Azimuth txt failed!")
+ Azimuth_incidence = 'None'
+
+ para_dict.update({"ObservationGeometry_SatelliteAzimuth": Azimuth_incidence})
+ para_dict.update({"imageinfo_ProductName": '正射校正'})
+ para_dict.update({"imageinfo_ProductIdentifier": 'Ortho'})
+ para_dict.update({"imageinfo_ProductLevel": productLevel})
+ para_dict.update({"ProductProductionInfo_BandSelection": "1,2"})
+ para_dict.update({"ProductProductionInfo_AuxiliaryDataDescription": "DEM"})
+ CreateProductXml(para_dict, model_path, meta_xml_path).create_standard_xml()
+
+ temp_folder = os.path.join(self.__workspace_path, EXE_NAME, 'Output')
+ out_xml = os.path.join(temp_folder, os.path.basename(meta_xml_path))
+ if os.path.exists(temp_folder) is False:
+ os.mkdir(temp_folder)
+ # CreateProductXml(para_dict, model_path, out_xml).create_standard_xml()
+ shutil.copy(meta_xml_path, out_xml)
+
+ # 生成压缩包
+ logger.info('progress bar :94%')
+ logger.info('start make targz..')
+ # self.del_floder(self.__workspace_unpack_path)
+ # self.del_floder(self.__workspace_ResampledDEM_path)
+ # self.del_floder(self.__workspace_LutImg_path)
+ # self.del_floder(self.__workspace_IncidenceImg_path)
+ # self.del_floder(self.__workspace_SimImg_path)
+ # self.del_floder(self.__workspace_SARIntensity_path)
+ self.make_targz(self.__out_para, self.__workspace_package_path + "\\")
+ logger.info('make targz finish')
+ logger.info('progress bar :100%')
+ return True
+ pass
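Each strip contributes its own orth_para.txt and sim_ori raster, keyed by the trailing strip id of its file name so the per-polarisation loop can fetch the matching pair. In miniature (basename hypothetical):

baseName = "GF3B_XXX_TOPN_000000_E00.0_N00.0_20240101_L1A_HH_1"   # hypothetical strip basename
strip_id = baseName.split('_')[-1]                                # -> "1"
key_para, key_sim = f"paramter_strip_{strip_id}", f"sim_ori_strip_{strip_id}"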
if __name__ == '__main__':
diff --git a/Ortho/Ortho_C_SAR_V3.xml b/Ortho/Ortho_C_SAR_V3.xml
index 03016058..327d591f 100644
--- a/Ortho/Ortho_C_SAR_V3.xml
+++ b/Ortho/Ortho_C_SAR_V3.xml
@@ -49,7 +49,7 @@
DEFAULT
DEFAULT
- F:\isce_yuan\lancover-yuan\GF3B_KSC_QPSI_010328_E86.1_N44.5_20231109_L1A_AHV_L10000262134.tar.gz
+ G:\UFS\GF3B_MYC_UFS_003192_E116.6_N39.5_20220702_L1A_DH_L10000059009.tar.gz
True
False
File
@@ -67,7 +67,7 @@
DEFAULT
DEFAULT
Cal
- F:\xibei_LandCover\dem.tif
+ G:\UFS\dem\115E39N_COP30.tif
True
True
File
@@ -86,7 +86,7 @@
DEFAULT
DEFAULT
- F:\isce_yuan\lancover-yuan\S1A_map
+ G:\UFS\SIA_GBM
True
False
File
@@ -121,7 +121,7 @@
File
tar.gz
Cal
- D:\micro\WorkSpace\ortho\Output\GF3B_KSC_QPSI_010328_E86.1_N44.5_20231109_L1A_AHV_L10000262134-ortho.tar.gz
+ D:\micro\WorkSpace\ortho\Output\GF3B_MYC_UFS_003192_E116.6_N39.5_20220702_L1A_DH_L10000059009-ortho.tar.gz
DEFAULT
DEFAULT
DEFAULT
diff --git a/Ortho/config.ini b/Ortho/config.ini
index f66a1bac..564145f0 100644
--- a/Ortho/config.ini
+++ b/Ortho/config.ini
@@ -2,7 +2,7 @@
# 定义config分组
[config]
######1-算法基本参数######
-tager = ortho
+target = ortho
productLEVEL = 3
# 算法名称。修改临时工作区生成临时文件的名称,日志名称;
exe_name = ortho
diff --git a/backScattering/BackScatteringAlg.py b/backScattering/BackScatteringAlg.py
index 321707a2..1f61f254 100644
--- a/backScattering/BackScatteringAlg.py
+++ b/backScattering/BackScatteringAlg.py
@@ -8,7 +8,9 @@
@Date :2021/8/14
@Version :1.0.0
"""
+import glob
import zipfile
+from xml.etree.ElementTree import ElementTree
# from re import I, T, X, match
# from types import DynamicClassAttribute
@@ -57,10 +59,11 @@ class ScatteringAlg:
pass
@staticmethod
- def sar_backscattering_coef(in_sar_tif, meta_file_path, out_sar_tif, replece_VV = False, is_DB = True):
+ def sar_backscattering_coef(in_sar_tif, meta_file_path, out_sar_tif, inc_xml_path, replece_VV = False, is_DB = True):
# 读取原始SAR影像
proj, geotrans, in_data = ImageHandler.read_img(in_sar_tif)
+ rows = ImageHandler.get_img_height(in_sar_tif)
# 计算强度信息
I = np.array(in_data[0], dtype="float32")
@@ -103,8 +106,12 @@ class ScatteringAlg:
coef_arr[np.isinf(coef_arr)] = -9999
coef_arr[where_9999_0] = -9999
coef_arr[where_9999_1] = -9999
+ coef_arr[coef_arr < -25] = -25
## 输出的SAR后向散射系数产品
# ImageHandler.write_img(out_sar_tif, proj, geotrans, coef_arr, 0)
+ _, _, inc_arr = ImageHandler().read_img(inc_xml_path)
+ rad_arr = np.radians(inc_arr)
+ sin_arr = np.sin(rad_arr)
tif_array = np.power(10.0, coef_arr / 10.0) # dB --> 线性值 后向散射系数
@@ -112,11 +119,27 @@ class ScatteringAlg:
tif_array[np.isinf(tif_array)] = 0
tif_array[where_9999_0] = 0
tif_array[where_9999_1] = 0
-
+ tif_array_cos = tif_array / sin_arr
ImageHandler.write_img(out_sar_tif, proj, geotrans, tif_array, 0)
+ # out_sar_tif1 = r'D:\micro\WorkSpace\BackScattering\cal_cos.tif'
+ # ImageHandler.write_img(out_sar_tif1, proj, geotrans, tif_array_cos, 0)
return True
+ @staticmethod
+ def get_inc_angle(inc_xml, rows, out_path):
+ tree = ElementTree()
+ tree.parse(inc_xml) # 影像头文件
+ root = tree.getroot()
+ values = root.findall('incidenceValue')
+ angle_value = [value.text for value in values]
+        angle_value = np.array(angle_value, dtype=np.float32)  # cast the XML text values to float
+ inc_angle = np.tile(angle_value, (rows, 1))
+ ImageHandler.write_img(out_path, '', [0.0, 1.0, 0.0, 0.0, 0.0, 1.0], inc_angle)
+
+ pass
+
+
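get_inc_angle reads the per-column incidenceValue entries and repeats that single row down the image. A stand-alone sketch of the same idea; note the explicit float cast, since ElementTree returns the values as text:

import numpy as np
from xml.etree.ElementTree import ElementTree

def incidence_to_array(inc_xml, rows):
    tree = ElementTree()
    tree.parse(inc_xml)                                           # *.incidence.xml
    values = tree.getroot().findall('incidenceValue')
    angles = np.array([v.text for v in values], dtype=np.float32) # one angle per column
    return np.tile(angles, (rows, 1))                             # repeat the row for every image line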
@staticmethod
def sar_backscattering_coef_RPC(in_sar_tif, meta_file_path, out_sar_tif, replece_VV=False, is_DB=True):
@@ -177,6 +200,17 @@ class ScatteringAlg:
# db_arr[np.isinf(db_arr)] = -9999
ImageHandler.write_img(db_path, proj, geotrans, db_arr, -9999)
+ @staticmethod
+ def get_incidence_xml_paths(file_dir, name):
+ meta_xml_paths = []
+ if os.path.exists(file_dir + name + '\\'):
+ meta_xml_paths = list(glob.glob(os.path.join(file_dir + name, '*.incidence.xml')))
+ else:
+ meta_xml_paths = list(glob.glob(os.path.join(file_dir, '*.incidence.xml')))
+ if meta_xml_paths is None or meta_xml_paths == []:
+ raise Exception('there is not .incidence.xml in path: ', file_dir + '\\')
+ return meta_xml_paths
+
def FindInfomationFromJson(HeaderFile_dom_json, node_path_list):
"""
diff --git a/backScattering/BackScatteringMain.py b/backScattering/BackScatteringMain.py
index 1a355c93..67d2e341 100644
--- a/backScattering/BackScatteringMain.py
+++ b/backScattering/BackScatteringMain.py
@@ -185,7 +185,7 @@ class ScatteringMain:
else:
meta_xml_paths = list(glob.glob(os.path.join(file_dir, '*.meta.xml')))
para_dic.update({'SLC': file_dir})
-
+ para_dic.update({'incXML': alg.get_incidence_xml_paths(file_dir, name)[0]})
if meta_xml_paths == []:
raise Exception('there is not .meta.xml in path: ', file_dir + '\\')
para_dic.update({'META': meta_xml_paths[0]})
@@ -376,6 +376,10 @@ class ScatteringMain:
Orth_Slc = []
in_dem_path = self.__in_processing_paras['DEM']
meta_file_path = self.__in_processing_paras['META'] # .meta文件路径
+ inc_xml_path = self.__in_processing_paras['incXML']
+ rows = self.imageHandler.get_img_height(in_tif_paths[0])
+ inc_path = os.path.join(self.__workspace_preprocessing_path, 'inc_angle.tif')
+        ImageHandler.get_inc_angle(inc_xml_path, rows, 1, inc_path)
out_dem_path = self.__workspace_preprocessing_path
# unzipped_folder_path = DEMProcess.unzip_file(in_dem_path, self.__workspace_origin_path) # 解压DEM.zip
dem_merged_path = DEMProcess.dem_merged(in_dem_path, meta_file_path,
@@ -470,7 +474,7 @@ class ScatteringMain:
if ('HH' in os.path.basename(in_tif_path)) or ('HV' in os.path.basename(in_tif_path)) or (
'VH' in os.path.basename(in_tif_path)) or ('VV' in os.path.basename(in_tif_path)) or (
'DH' in os.path.basename(in_tif_path)):
- alg.sar_backscattering_coef(in_tif_path, meta_file_path, out_tif_path)
+ alg.sar_backscattering_coef(in_tif_path, meta_file_path, out_tif_path, inc_path)
# 构建RPC
# 查找RPC
rpc_path = in_tif_path.replace(".tiff", ".rpc") if os.path.exists(
@@ -494,6 +498,7 @@ class ScatteringMain:
Orthorectification.calInterpolation_bil_Wgs84_rc_sar_sigma(parameter_path, sim_ori_rpc,
out_tif_path,
lin_tif_path)
+
tempout_tif_path = os.path.join(self.__workspace_processing_path,
os.path.splitext(os.path.basename(in_tif_path))[0]) + tager + r".tif"
alg.lin_to_db(lin_tif_path, tempout_tif_path) # 线性值转回DB值
diff --git a/backScattering/BackScattering_C_SAR_V3.xml b/backScattering/BackScattering_C_SAR_V3.xml
index 36665cac..71474226 100644
--- a/backScattering/BackScattering_C_SAR_V3.xml
+++ b/backScattering/BackScattering_C_SAR_V3.xml
@@ -42,7 +42,7 @@
File
tar.gz
Cal
- F:\MicroWorkspace\20240826Ortho\GF3B_MH1_UFS_012705_E116.4_N44.2_20240422_L1A_DH_L10000343416.tar.gz
+ F:\MicroWorkspace\Micro\neimenggu\GF3_MDJ_QPSI_031847_E116.4_N43.9_20220828_L1A_AHV_L10006708819.tar.gz
True
False
File
@@ -55,9 +55,9 @@
DEM数字高程影像
30m分辨率DEM数字高程影像
File
- File
+ tif
Cal
- F:\MicroWorkspace\20240826Ortho\dem
+ F:\MicroWorkspace\Micro\neimenggu\DEM\115E39N_COP30.tif
True
True
File
@@ -72,7 +72,7 @@
File
File
Cal
- F:\MicroWorkspace\20240826Ortho\S1GBM
+ F:\MicroWorkspace\Micro\neimenggu\S1GBM
True
True
File
@@ -89,7 +89,7 @@
File
tar.gz
Cal
- D:\micro\WorkSpace\BackScattering\Output\GF3B_MH1_UFS_012705_E116.4_N44.2_20240422_L1A_DH_L10000343416-cal.tar.gz
+ D:\micro\WorkSpace\BackScattering\Output\GF3_MDJ_QPSI_031847_E116.4_N43.9_20220828_L1A_AHV_L10006708819-cal.tar.gz
DEFAULT
DEFAULT
DEFAULT
diff --git a/dem-C-SAR/Dem.xml b/dem-C-SAR/Dem.xml
index 93abcc6a..e28043ea 100644
--- a/dem-C-SAR/Dem.xml
+++ b/dem-C-SAR/Dem.xml
@@ -38,7 +38,7 @@
Value
string
Man
- 20161129
+ 20200218
True
False
UploadInput
@@ -54,8 +54,8 @@
zip
Man
- D:\micro\microproduct_depdence\GF3-Deformation\download\cls\GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686.tar.gz;
- D:\micro\microproduct_depdence\GF3-Deformation\download\cls\GF3_KAS_FSI_002034_E113.4_N34.7_20161228_L1A_HHHV_L10002077539.tar.gz
+ G:\GF3极化干涉数据\2\GF3_SAY_QPSI_018565_E104.1_N30.6_20200218_L1A_AHV_L10004624982.tar.gz;
+ G:\GF3极化干涉数据\2\GF3_SAY_QPSI_018983_E104.1_N30.6_20200318_L1A_AHV_L10004680879.tar.gz
True
False
@@ -71,7 +71,7 @@
value
string
Man
- 34.6;34.63;113.1;113.15
+ empty
True
True
UploadInput
@@ -86,7 +86,7 @@
File
File
Man
- D:\micro\microproduct_depdence\GF3-Deformation\dem
+ G:\GF3极化干涉数据\DEM
True
False
File
@@ -120,7 +120,7 @@
File
tar.gz
Cal
- D:\micro\WorkSpace\Dem\Output\GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686-DEM.tar.gz
+ D:\micro\WorkSpace\Dem\Output\GF3_SAY_QPSI_018565_E104.1_N30.6_20200218_L1A_AHV_L10004624982-DEM.tar.gz
diff --git a/dem-C-SAR/DemMain.py b/dem-C-SAR/DemMain.py
index 4dcf1319..f8a6c5ac 100644
--- a/dem-C-SAR/DemMain.py
+++ b/dem-C-SAR/DemMain.py
@@ -562,7 +562,7 @@ if __name__ == '__main__':
start = datetime.datetime.now()
try:
if len(sys.argv) < 2:
- xml_path = r'Dem_C_SAR_V3.xml'
+ xml_path = r'Dem.xml'
else:
xml_path = sys.argv[1]
Main = DemMain(xml_path)
@@ -583,3 +583,4 @@ if __name__ == '__main__':
+
diff --git a/dem-C-SAR/DemMain.spec b/dem-C-SAR/DemMain.spec
index e852683b..3bba9222 100644
--- a/dem-C-SAR/DemMain.spec
+++ b/dem-C-SAR/DemMain.spec
@@ -1,6 +1,30 @@
# -*- mode: python ; coding: utf-8 -*-
+import sys
+from shutil import copy, rmtree
+import os
+
+cwdpath = os.getcwd()
+toolDir = os.path.join(cwdpath, 'tool')
+if os.path.exists(toolDir):
+    rmtree(toolDir)  # os.remove cannot delete a directory
+os.mkdir(toolDir)
+source_folder = '../tool'
+
+def copy_file(path_read, path_write):
+ names = os.listdir(path_read)
+ for name in names:
+ path_read_new = os.path.join(path_read, name)
+ path_write_new = os.path.join(path_write, name)
+ if os.path.isdir(path_read_new):
+ if not os.path.exists(path_write_new):
+ os.mkdir(path_write_new)
+ copy_file(path_read_new, path_write_new)
+ else:
+ copy(path_read_new, path_write_new)
+
+copy_file(source_folder, toolDir)
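The copy_file helper above is a hand-rolled recursive copy; on Python 3.8+ an equivalent one-liner would be:

import shutil
shutil.copytree('../tool', toolDir, dirs_exist_ok=True)  # dirs_exist_ok allows the pre-created target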
block_cipher = None
diff --git a/dem-C-SAR/ISCEApp/_internal/isce/components/isceobj/Location/Offset.py b/dem-C-SAR/ISCEApp/_internal/isce/components/isceobj/Location/Offset.py
index 96960f4d..85b88269 100644
--- a/dem-C-SAR/ISCEApp/_internal/isce/components/isceobj/Location/Offset.py
+++ b/dem-C-SAR/ISCEApp/_internal/isce/components/isceobj/Location/Offset.py
@@ -395,117 +395,119 @@ class OffsetField(Component):
numCoeff = 0
####Try and use Howard's polynomial fit code whenever possible
- if (rangeOrder == azimuthOrder) and (rangeOrder <= 3):
- if (rangeOrder == 1):
- if maxOrder:
- numCoeff = 3
- else:
- numCoeff = 4
- elif (rangeOrder == 2):
- if maxOrder:
- numCoeff = 6
- elif (rangeOrder == 3):
- if maxOrder:
- numCoeff = 10
+ try:
+ if (rangeOrder == azimuthOrder) and (rangeOrder <= 3):
+ if (rangeOrder == 1):
+ if maxOrder:
+ numCoeff = 3
+ else:
+ numCoeff = 4
+ elif (rangeOrder == 2):
+ if maxOrder:
+ numCoeff = 6
+ elif (rangeOrder == 3):
+ if maxOrder:
+ numCoeff = 10
+
+ inArr = np.array(self.unpackOffsets())
+ azmin = np.min(inArr[:, 2])
+ inArr[:, 2] -= azmin
+
+ ####Use Howard's code
+ if (numCoeff != 0) and not usenumpy:
+ x = list(inArr[:, 0])
+ y = list(inArr[:, 2])
+ dx = list(inArr[:, 1])
+ dy = list(inArr[:, 3])
+ sig = list(inArr[:, 4])
+
+ ####Range Offsets
+ obj = Offsetpoly()
+ obj.setLocationAcross(x)
+ obj.setLocationDown(y)
+ obj.setSNR(sig)
+ obj.setOffset(dx)
+ obj.numberFitCoefficients = numCoeff
+ obj.offsetpoly()
+
+ val = obj.offsetPoly
+
+ #####Unpack into 2D array
+ if numCoeff == 3:
+ coeffs = [[val[0], val[1]],
+ [val[2], 0.0]]
+
+ elif numCoeff == 4:
+ coeffs = [[val[0], val[1]],
+ [val[2], val[3]]]
+
+ elif numCoeff == 6:
+ coeffs = [[val[0], val[1], val[4]],
+ [val[2], val[3], 0.0],
+ [val[5], 0.0, 0.0]]
+
+ elif numCoeff == 10:
+ ####Unpacking needs to be checked.
+ coeffs = [[val[0], val[1], val[4], val[8]],
+ [val[2], val[3], val[6], 0.0],
+ [val[5], val[7], 0.0, 0.0],
+ [val[9], 0.0, 0.0, 0.0]]
+
+ rangePoly = Poly2D.Poly2D()
+ rangePoly.setMeanAzimuth(azmin)
+ rangePoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder, coeffs=coeffs)
+
+ ####Azimuth Offsets
+ obj.setOffset(dy)
+ obj.offsetpoly()
+ val = obj.offsetPoly
+
+ #####Unpack into 2D array
+ if numCoeff == 3:
+ coeffs = [[val[0], val[1]],
+ [val[2], 0.0]]
+
+ elif numCoeff == 4:
+ coeffs = [[val[0], val[1]],
+ [val[2], val[3]]]
+
+ elif numCoeff == 6:
+ coeffs = [[val[0], val[1], val[4]],
+ [val[2], val[3], 0.0],
+ [val[5], 0.0, 0.0]]
+
+ elif numCoeff == 10:
+ ####Unpacking needs to be checked.
+ coeffs = [[val[0], val[1], val[4], val[8]],
+ [val[2], val[3], val[6], 0.0],
+ [val[5], val[7], 0.0, 0.0],
+ [val[9], 0.0, 0.0, 0.0]]
+
+ azimuthPoly = Poly2D.Poly2D()
+ azimuthPoly.setMeanAzimuth(azmin)
+ azimuthPoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder, coeffs=coeffs)
+
+ ####Fallback to numpy based polynomial fitting
+ else:
+
+ x = inArr[:, 0]
+ y = inArr[:, 2]
+ dx = inArr[:, 1]
+ dy = inArr[:, 3]
+ sig = inArr[:, 4]
+
+ azimuthPoly = Poly2D.Poly2D()
+ azimuthPoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder)
+ azimuthPoly.polyfit(x, y, dy, sig=sig)
+ azimuthPoly._meanAzimuth += azmin
+
+ rangePoly = Poly2D.Poly2D()
+ rangePoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder)
+ rangePoly.polyfit(x, y, dx, sig=sig)
+ rangePoly._meanAzimuth += azmin
+ return (azimuthPoly, rangePoly)
+ except Exception as e:
+ print(e)
- inArr = np.array(self.unpackOffsets())
- azmin = np.min(inArr[:,2])
- inArr[:,2] -= azmin
- ####Use Howard's code
- if (numCoeff != 0) and not usenumpy:
- x = list(inArr[:,0])
- y = list(inArr[:,2])
- dx = list(inArr[:,1])
- dy = list(inArr[:,3])
- sig = list(inArr[:,4])
-
- ####Range Offsets
- obj = Offsetpoly()
- obj.setLocationAcross(x)
- obj.setLocationDown(y)
- obj.setSNR(sig)
- obj.setOffset(dx)
- obj.numberFitCoefficients = numCoeff
- obj.offsetpoly()
-
- val = obj.offsetPoly
-
- #####Unpack into 2D array
- if numCoeff == 3:
- coeffs = [[val[0], val[1]],
- [val[2], 0.0]]
-
- elif numCoeff == 4:
- coeffs = [[val[0], val[1]],
- [val[2], val[3]]]
-
- elif numCoeff == 6:
- coeffs = [[val[0], val[1], val[4]],
- [val[2], val[3], 0.0],
- [val[5], 0.0, 0.0]]
-
- elif numCoeff == 10:
- ####Unpacking needs to be checked.
- coeffs = [[val[0], val[1], val[4], val[8]],
- [val[2], val[3], val[6], 0.0],
- [val[5], val[7],0.0, 0.0],
- [val[9], 0.0, 0.0, 0.0]]
-
-
- rangePoly = Poly2D.Poly2D()
- rangePoly.setMeanAzimuth(azmin)
- rangePoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder, coeffs=coeffs)
-
- ####Azimuth Offsets
- obj.setOffset(dy)
- obj.offsetpoly()
- val = obj.offsetPoly
-
- #####Unpack into 2D array
- if numCoeff == 3:
- coeffs = [[val[0], val[1]],
- [val[2], 0.0]]
-
- elif numCoeff == 4:
- coeffs = [[val[0], val[1]],
- [val[2], val[3]]]
-
- elif numCoeff == 6:
- coeffs = [[val[0], val[1], val[4]],
- [val[2], val[3], 0.0],
- [val[5], 0.0, 0.0]]
-
- elif numCoeff == 10:
- ####Unpacking needs to be checked.
- coeffs = [[val[0], val[1], val[4], val[8]],
- [val[2], val[3], val[6], 0.0],
- [val[5], val[7],0.0, 0.0],
- [val[9], 0.0, 0.0, 0.0]]
-
- azimuthPoly = Poly2D.Poly2D()
- azimuthPoly.setMeanAzimuth(azmin)
- azimuthPoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder, coeffs=coeffs)
-
- ####Fallback to numpy based polynomial fitting
- else:
-
- x = inArr[:,0]
- y = inArr[:,2]
- dx = inArr[:,1]
- dy = inArr[:,3]
- sig = inArr[:,4]
-
-
- azimuthPoly = Poly2D.Poly2D()
- azimuthPoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder)
- azimuthPoly.polyfit(x,y,dy, sig=sig)
- azimuthPoly._meanAzimuth += azmin
-
- rangePoly = Poly2D.Poly2D()
- rangePoly.initPoly(rangeOrder=rangeOrder, azimuthOrder=azimuthOrder)
- rangePoly.polyfit(x,y,dx,sig=sig)
- rangePoly._meanAzimuth += azmin
-
- return (azimuthPoly, rangePoly)
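The numpy fallback branch fits low-order 2D polynomials to the (x, y) -> offset samples through Poly2D.polyfit. Purely as a conceptual stand-in (not the ISCE Poly2D API), a first-order fit reduces to a weighted least-squares plane:

import numpy as np

def fit_plane(x, y, d, w):
    """Weighted LS fit of d ~ a + b*x + c*y; w are per-sample weights (illustration only)."""
    x, y, d, w = (np.asarray(v, dtype=float) for v in (x, y, d, w))
    A = np.column_stack([np.ones_like(x), x, y]) * np.sqrt(w)[:, None]
    coef, *_ = np.linalg.lstsq(A, d * np.sqrt(w), rcond=None)
    return coef  # [a, b, c]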
diff --git a/dem-C-SAR/geocoding.py b/dem-C-SAR/geocoding.py
index 02388470..bd0f53f3 100644
--- a/dem-C-SAR/geocoding.py
+++ b/dem-C-SAR/geocoding.py
@@ -133,7 +133,7 @@ def geoCoding(tree,X_min,X_max,Y_min,Y_max,block_size,value_data,target_arr):
X_ids=XY_query[idx,0] # x
Y_ids=XY_query[idx,1] # y
XY_query=np.concatenate([X_ids.reshape(-1,1),Y_ids.reshape(-1,1)],axis=1)
- dist_mask=dist[:,3]<=50 ####################################################################################
+ dist_mask=dist[:,3]<=2 ####################################################################################
idx=np.where(dist_mask==1)
ind=ind[idx,:][0,:,:]
dist=dist[idx,:][0,:,:]
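The tightened cutoff (50 -> 2) controls how far, in output-grid units, a value may be borrowed from its nearest neighbours during geocoding; query cells whose k-th neighbour is farther away are left as nodata. A minimal scipy sketch of the same masking idea, with synthetic points and an illustrative k of 4:

import numpy as np
from scipy.spatial import cKDTree

pts = np.random.rand(1000, 2) * 100          # known sample locations (synthetic)
query = np.random.rand(200, 2) * 100         # grid cells to fill (synthetic)
dist, ind = cKDTree(pts).query(query, k=4)
keep = dist[:, 3] <= 2                       # same test as above: 4th-neighbour distance cutoff
ind, dist = ind[keep], dist[keep]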
@@ -220,12 +220,12 @@ def get_Dem(isce_work_path, temp_work_path, pack_path, product_name, lamda):
los_data=read_tiff_dataset(los_tiff_path,band_idx=0)
lon_data=read_tiff_dataset(lon_tiff_path,band_idx=0)
lon = lon_data[0, :]
- lon_min = np.min(lon)
- lon_max = np.max(lon)
+ lon_min = np.min(lon_data)
+ lon_max = np.max(lon_data)
lat_data=read_tiff_dataset(lat_tiff_path,band_idx=0)
lat = lat_data[:, lat_data.shape[1]-1]
- lat_min = np.min(lat)
- lat_max = np.max(lat)
+ lat_min = np.min(lat_data)
+ lat_max = np.max(lat_data)
out_detrend_path = os.path.join(temp_work_path, "detrend_unw.tif")
unw_data = detrend_2d(unw_tiff_path, filt_topophase_cor_path, out_detrend_path)
# unw_data=read_tiff_dataset(unw_tiff_path,band_idx=0)
@@ -250,7 +250,7 @@ def get_Dem(isce_work_path, temp_work_path, pack_path, product_name, lamda):
# 形变值计算
def_data=-1*unw_data*lamda/(4*math.pi)
- hgt_data=hgt_data+def_data # 叠加isce高程
+ hgt_data= hgt_data + def_data # 叠加isce高程
# hgt_data=def_data # 叠加isce高程
hgt_tiff_path = os.path.join(temp_work_path, "hgt_data.tiff")
# ImageHandler.write_img(hgt_tiff_path, '', [0.0, 1.0, 0.0, 0.0, 0.0, 1.0], hgt_data)
@@ -264,7 +264,7 @@ def get_Dem(isce_work_path, temp_work_path, pack_path, product_name, lamda):
inv_gt=gdal.InvGeoTransform(gt)
print(xsize,ysize)
- def_arr=np.zeros((int(ysize)+1,int(xsize)+1))-9999
+ # def_arr=np.zeros((int(ysize)+1,int(xsize)+1))-9999
dem_arr=np.zeros((int(ysize)+1,int(xsize)+1))-9999
X=inv_gt[0]+lon_data*inv_gt[1]+inv_gt[2]*lat_data
@@ -280,7 +280,7 @@ def get_Dem(isce_work_path, temp_work_path, pack_path, product_name, lamda):
Y_min=math.ceil(np.min(Y))
Y_max=math.floor(np.max(Y))
- block_size=1000
+ block_size=3000
dem_arr=geoCoding(tree,X_min,X_max,Y_min,Y_max,block_size,hgt_data,dem_arr)
if not os.path.exists(pack_path):
@@ -292,9 +292,9 @@ def get_Dem(isce_work_path, temp_work_path, pack_path, product_name, lamda):
return dem_target_data_path
if __name__ == '__main__':
- isce_work_path = r"D:\micro\microproduct_depdence\GF3-Deformation\isce_work"
- temp_work_path = r"D:\micro\microproduct_depdence\GF3-Deformation\isce_work\test"
- out_work_path = r"D:\micro\microproduct_depdence\GF3-Deformation\isce_work\test"
- product = r'D:\micro\microproduct_depdence\GF3-Deformation\isce_work\test\dem.tiff'
- get_Dem(isce_work_path, temp_work_path, out_work_path, product)
+ isce_work_path = r"D:\micro\WorkSpace\Dem\Temporary\processing\isce_workspace"
+ temp_work_path = r"D:\micro\WorkSpace\Dem\Temporary\processing\isce_workspace\test"
+ out_work_path = r"D:\micro\WorkSpace\Dem\Temporary\processing\isce_workspace\test"
+ product = r'D:\micro\WorkSpace\Dem\Temporary\processing\isce_workspace\test\dem.tiff'
+ get_Dem(isce_work_path, temp_work_path, out_work_path, product, 0.055517)
pass
\ No newline at end of file
diff --git a/dem-sentiral/geocoding.py b/dem-sentiral/geocoding.py
index 09a19405..faf2bd72 100644
--- a/dem-sentiral/geocoding.py
+++ b/dem-sentiral/geocoding.py
@@ -115,7 +115,7 @@ def geoCoding(tree,X_min,X_max,Y_min,Y_max,block_size,value_data,target_arr):
X_ids=XY_query[idx,0] # x
Y_ids=XY_query[idx,1] # y
XY_query=np.concatenate([X_ids.reshape(-1,1),Y_ids.reshape(-1,1)],axis=1)
- dist_mask=dist[:,3]<=50 ####################################################################################
+ dist_mask=dist[:,3]<=1 ####################################################################################
idx=np.where(dist_mask==1)
ind=ind[idx,:][0,:,:]
dist=dist[idx,:][0,:,:]
diff --git a/landcover_c_sar/LandCoverMain.py b/landcover_c_sar/LandCoverMain.py
index dad9311f..c3a9b80f 100644
--- a/landcover_c_sar/LandCoverMain.py
+++ b/landcover_c_sar/LandCoverMain.py
@@ -135,6 +135,7 @@ class LandCoverMain:
para_dic.update(InitPara.get_meta_dic_new(InitPara.get_meta_paths(file_dir, name), name))
# tif路径字典
para_dic.update(InitPara.get_polarization_mode(InitPara.get_tif_paths(file_dir, name)))
+ para_dic.update({'incXML': InitPara.get_incidence_xml_paths(file_dir, name)[0]})
parameter_path = os.path.join(file_dir, "orth_para.txt")
para_dic.update({"paraMeter": parameter_path})
return para_dic
@@ -500,7 +501,13 @@ class LandCoverMain:
polarization = ['HH', 'HV', 'VH', 'VV']
calibration = Calibration.get_Calibration_coefficient(self.__processing_paras['Origin_META'], polarization)
tif_path = atp.calibration(calibration, in_ahv_dir=self.__workspace_preprocessed_path)
- atp.ahv_to_polsarpro_t3_soil(t3_path, tif_path)
+
+ inc_path = os.path.join(self.__workspace_processing_path, 'inc_angle.tif')
+ in_tif_paths = list(glob.glob(os.path.join(tif_path, '*.tif')))
+ rows = ImageHandler.get_img_height(in_tif_paths[0])
+ ImageHandler.get_inc_angle(self.__processing_paras['incXML'], rows, 1, inc_path)
+
+ atp.ahv_to_polsarpro_t3_soil(t3_path, inc_path, tif_path)
# Lee滤波
leeFilter = LeeRefinedFilterT3()
diff --git a/landcover_c_sar/LandCoverMain.spec b/landcover_c_sar/LandCoverMain.spec
index dbd0c85f..2114ef73 100644
--- a/landcover_c_sar/LandCoverMain.spec
+++ b/landcover_c_sar/LandCoverMain.spec
@@ -1,6 +1,30 @@
# -*- mode: python ; coding: utf-8 -*-
+import sys
+from shutil import copy, rmtree
+import os
+
+cwdpath = os.getcwd()
+toolDir = os.path.join(cwdpath, 'tool')
+if os.path.exists(toolDir):
+    rmtree(toolDir)  # os.remove cannot delete a directory
+os.mkdir(toolDir)
+source_folder = '../tool'
+
+def copy_file(path_read, path_write):
+ names = os.listdir(path_read)
+ for name in names:
+ path_read_new = os.path.join(path_read, name)
+ path_write_new = os.path.join(path_write, name)
+ if os.path.isdir(path_read_new):
+ if not os.path.exists(path_write_new):
+ os.mkdir(path_write_new)
+ copy_file(path_read_new, path_write_new)
+ else:
+ copy(path_read_new, path_write_new)
+
+copy_file(source_folder, toolDir)
block_cipher = None
diff --git a/landcover_c_sar/LandCover_C_SAR_V3.xml b/landcover_c_sar/LandCover_C_SAR_V3.xml
index b9ad1805..137816c4 100644
--- a/landcover_c_sar/LandCover_C_SAR_V3.xml
+++ b/landcover_c_sar/LandCover_C_SAR_V3.xml
@@ -42,7 +42,7 @@
DEFAULT
Cal
- F:\isce_yuan\lancover-yuan\GF3B_KSC_QPSI_010328_E86.1_N44.5_20231109_L1A_AHV_L10000262134-ortho.tar.gz
+ F:\MicroWorkspace\GF3A_nanjing\input-ortho\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho.tar.gz
True
False
File
@@ -60,7 +60,7 @@
DEFAULT
DEFAULT
Cal
- F:\isce_yuan\lancover-yuan\train_landConver1.csv
+ F:\MicroWorkspace\GF3A_nanjing\input-ortho\LandCoverLable_geo_5.csv
True
True
UploadTable
@@ -99,7 +99,7 @@
DEFAULT
DEFAULT
Man
- 0,1,2
+ 0,1,2,3,4,5,6
True
True
UploadInput
@@ -120,7 +120,7 @@
DEFAULT
DEFAULT
Man
- D:\micro\WorkSpace\LandCover\Output\GF3B_KSC_QPSI_010328_E86.1_N44.5_20231109_L1A_AHV_L10000262134-ortho-LANDCLASS.tar.gz
+ D:\micro\WorkSpace\LandCover\Output\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho-LANDCLASS.tar.gz
diff --git a/leafAreaIndex/LeafAreaIndex.xml b/leafAreaIndex/LeafAreaIndex.xml
index 73a0c56b..9070e1ce 100644
--- a/leafAreaIndex/LeafAreaIndex.xml
+++ b/leafAreaIndex/LeafAreaIndex.xml
@@ -53,7 +53,7 @@
File
tar.gz
Man
- F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-cal.tar.gz
+ D:\micro\WorkSpace\BackScattering\Output\GF3_MDJ_QPSI_031847_E116.4_N43.9_20220828_L1A_AHV_L10006708819-cal.tar.gz
True
False
File
@@ -83,7 +83,7 @@
File
tif
Man
- F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-cal-SMC.tif
+ F:\al_zhongji\C-SAR-data\CSAR_leaf_NEW_soil_moisture.tif
True
False
File
@@ -96,9 +96,9 @@
叶面积指数实测数据
叶面积指数实测数据
File
- tif
+ csv
Man
- F:\Tian-GF3-Wenchang\LAI\LAi.csv
+ F:\al_zhongji\C-SAR-data\CSAR_leaf_new_LAI.csv
True
False
File
@@ -113,7 +113,7 @@
File
tif
Cal
- F:\Tian-GF3-Wenchang\NDVI\S2_NDVImed_SMC_GF3.tif
+ F:\MicroWorkspace\Micro\neimenggu\L9NDVI.tif
True
False
File
@@ -128,7 +128,7 @@
Value
string
Man
- 0;1
+ -1;1
True
False
UploadInput
@@ -143,7 +143,7 @@
File
tif
Cal
- F:\Tian-GF3-Wenchang\landCover.tif
+ F:\MicroWorkspace\Micro\neimenggu\N50_40_2020LC030_geo.tif
True
False
File
@@ -158,7 +158,7 @@
Value
string
Man
- empty
+ 10;20;30;40
True
False
UploadInput
@@ -173,7 +173,7 @@
Value
float
Cal
- -31.6
+ 0.0118770619
True
False
UploadInput
@@ -188,7 +188,7 @@
Value
float
Cal
- 0.01
+ 1.15098022
True
False
UploadInput
@@ -203,7 +203,7 @@
Value
float
Cal
- 0.001
+ -0.146299064
True
False
UploadInput
@@ -218,7 +218,7 @@
Value
float
Cal
- -5
+ 565.461513
True
False
UploadInput
@@ -235,7 +235,7 @@
File
tar.gz
Cal
- D:\micro\WorkSpace\LeafAreaIndex\Output\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-cal-LAI.tar.gz
+ D:\micro\WorkSpace\LeafAreaIndex\Output\GF3_MDJ_QPSI_031847_E116.4_N43.9_20220828_L1A_AHV_L10006708819-cal-LAI.tar.gz
DEFAULT
DEFAULT
DEFAULT
diff --git a/leafAreaIndex/LeafAreaIndex_C_SAR_V3.xml b/leafAreaIndex/LeafAreaIndex_C_SAR_V3.xml
index 013fdcd1..7e21472a 100644
--- a/leafAreaIndex/LeafAreaIndex_C_SAR_V3.xml
+++ b/leafAreaIndex/LeafAreaIndex_C_SAR_V3.xml
@@ -60,7 +60,7 @@
DEFAULT
Cal
- E:\MicroWorkspace\GF3A_nanjing\soilMoisture_data\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_back.tar.gz
+ F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-cal.tar.gz
True
False
File
@@ -96,7 +96,7 @@
DEFAULT
DEFAULT
Cal
- D:\micro\WorkSpace\TOP\SoilMoisture11\Temporary\processing\soil_moisture.tif
+ F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-cal-SMC.tif
True
False
UploadTable
@@ -114,7 +114,7 @@
DEFAULT
DEFAULT
Cal
- D:\estar-proj\microproduct_depdence\model_tool\process_lai\laiplus2.csv
+ F:\Tian-GF3-Wenchang\LAI\LAi.csv
True
False
UploadTable
@@ -132,7 +132,7 @@
DEFAULT
DEFAULT
Cal
- E:\MicroWorkspace\GF3A_nanjing\soilMoisture_data\NDVI_E118.9_N31.5.tif
+ F:\Tian-GF3-Wenchang\NDVI\S2_NDVImed_SMC_GF3.tif
True
False
UploadTable
@@ -150,7 +150,7 @@
DEFAULT
DEFAULT
Man
- 0;1
+ 0.2;1
True
False
UploadInput
@@ -168,7 +168,7 @@
DEFAULT
DEFAULT
Cal
- E:\MicroWorkspace\GF3A_nanjing\soilMoisture_data\Covering_preprocessed.tif
+ F:\Tian-GF3-Wenchang\LandCover.tif
True
False
UploadTable
@@ -187,7 +187,7 @@
DEFAULT
DEFAULT
Man
- 10;20;30;40;50;60;80;90
+ 10;20;30;40
True
False
UploadInput
@@ -205,7 +205,7 @@
DEFAULT
DEFAULT
DEFAULT
- -31.6
+ 0.0118770619
True
False
UploadInput
@@ -223,7 +223,7 @@
DEFAULT
DEFAULT
Man
- 0.01
+ 1.15098022
True
False
UploadInput
@@ -241,7 +241,7 @@
DEFAULT
DEFAULT
DEFAULT
- 0.001
+ -0.146299064
True
False
UploadInput
@@ -259,7 +259,7 @@
DEFAULT
DEFAULT
Man
- -5
+ 565.461513
True
False
UploadInput
@@ -276,8 +276,7 @@
File
tar.gz
Cal
-
- D:\micro\WorkSpace\LeafAreaIndex\Output\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422_back-lef.tar.gz
+ D:\micro\WorkSpace\LeafAreaIndex\Output\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-cal-LAI.tar.gz
DEFAULT
DEFAULT
DEFAULT
diff --git a/leafAreaIndex/LeafIndexMain.py b/leafAreaIndex/LeafIndexMain.py
index cfbd5351..46da5644 100644
--- a/leafAreaIndex/LeafIndexMain.py
+++ b/leafAreaIndex/LeafIndexMain.py
@@ -38,8 +38,8 @@ from scipy.interpolate import griddata
import sys
import multiprocessing
from tool.file.fileHandle import fileHandle
-from sample_process import read_sample_csv,combine_sample_attr,ReprojectImages2,read_tiff,check_sample,split_sample_list
-from tool.LAI.LAIProcess import train_WMCmodel,test_WMCModel,process_tiff
+from sample_process import read_sample_csv,combine_sample_attr,ReprojectImages2,read_tiff,check_sample, train_WMCmodel
+# from tool.LAI.LAIProcess import train_WMCmodel,test_WMCModel,process_tiff
cover_id_list = []
threshold_of_ndvi_min = 0
@@ -227,12 +227,13 @@ class LeafIndexMain:
# 利用角度为nan生成Mask
pp.check_LocalIncidenceAngle(self.__preprocessed_paras['LocalIncidenceAngle'], self.__preprocessed_paras['LocalIncidenceAngle'])
angle_nan_mask_path = processing_path + 'angle_nan_mask.tif'
- roi.trans_tif2mask(angle_nan_mask_path, self.__preprocessed_paras['LocalIncidenceAngle'], np.nan)
+ roi.trans_tif2mask(angle_nan_mask_path, self.__preprocessed_paras['LocalIncidenceAngle'], 0, 90)
# 利用影像的有效范围生成MASK
tif_nan_mask_path = processing_path + 'tif_mask_nan.tif'
- roi.trans_tif2mask(tif_nan_mask_path, self.__preprocessed_paras[self.__sar_tif_name], np.nan)
- # tif_zero_mask_path = processing_path + 'tif_mask_zero.tif'
- # roi.trans_tif2mask(tif_zero_mask_path, self.__preprocessed_paras[self.__sar_tif_name], 0,0)
+ # roi.trans_tif2mask(tif_nan_mask_path, self.__preprocessed_paras[self.__sar_tif_name], np.nan)
+ roi.trans_tif2mask(tif_nan_mask_path, self.__preprocessed_paras[self.__sar_tif_name], 0, 0) # 0,0修改为np.nan
+ tif_zero_mask_path = processing_path + 'tif_mask_zero.tif'
+ roi.trans_tif2mask(tif_zero_mask_path, self.__preprocessed_paras[self.__sar_tif_name], np.nan)
# 2、利用cover计算植被覆盖范围
cover_mask_path = self.__workspace_processing_path + "SurfaceCover_mask.tif"
@@ -260,7 +261,7 @@ class LeafIndexMain:
bare_land_mask_path = self.__workspace_processing_path + "bare_land_mask.tif"
roi.combine_mask(bare_land_mask_path, ndvi_mask_path, cover_mask_path)
roi.combine_mask(bare_land_mask_path, bare_land_mask_path, tif_nan_mask_path)
- # roi.combine_mask(bare_land_mask_path, bare_land_mask_path, tif_zero_mask_path)
+ roi.combine_mask(bare_land_mask_path, bare_land_mask_path, tif_zero_mask_path)
roi.combine_mask(bare_land_mask_path, bare_land_mask_path, angle_nan_mask_path)
logger.info('combine_mask success!')
@@ -291,6 +292,8 @@ class LeafIndexMain:
v1 = 1
vm_value = self.imageHandler.get_band_array(vm_file, 1)
image_value = self.imageHandler.get_band_array(image_file, 1)
+ # image_value = np.power(10, (image_value / 10))
+ # image_value[np.isnan(image_value)] = 0
# Filter.lee_filter_array(image_value, image_value, FILTER_SISE)
@@ -392,9 +395,8 @@ class LeafIndexMain:
incident_angle) # ['日期','样方编号','经度','纬度','叶面积指数',"后向散射系数",'土壤含水量','入射角']
lai_waiter_inc_sigma_list = combine_sample_attr(lai_water_inc_list,
sigma_tiff) # ['日期','样方编号','经度','纬度','叶面积指数',"后向散射系数",'土壤含水量','入射角','后向散射系数']
- # lai_waiter_inc_sigma_NDVI_list=combine_sample_attr(lai_waiter_inc_sigma_list,NDVI_tiff) # ['日期','样方编号','经度','纬度','叶面积指数',"后向散射系数",'土壤含水量','入射角','后向散射系数','NDVI']
- lai_waiter_inc_sigma_list = check_sample(
- lai_waiter_inc_sigma_list) # 清理样本 ['日期','样方编号','经度','纬度','叶面积指数',"后向散射系数",'土壤含水量','入射角','后向散射系数']
+ lai_waiter_inc_sigma_NDVI_list = combine_sample_attr(lai_waiter_inc_sigma_list, NDVI_tiff)
+ lai_waiter_inc_sigma_list = check_sample(lai_waiter_inc_sigma_NDVI_list) # 清理样本 ['日期','样方编号','经度','纬度','叶面积指数',"后向散射系数",'土壤含水量','入射角','后向散射系数']
# lai_waiter_inc_sigma_NDVI_list=check_sample(lai_waiter_inc_sigma_NDVI_list) # ['日期','样方编号','经度','纬度','叶面积指数',"后向散射系数",'土壤含水量','入射角','后向散射系数','NDVI']
# 数据集筛选
lai_waiter_inc_sigma_list_result = []
@@ -415,14 +417,14 @@ class LeafIndexMain:
b = self.__processing_paras["B"]
c = self.__processing_paras["C"]
d = self.__processing_paras["D"]
- params_X0 = [a, b, c, d, 0.771, -0.028]
+ params_X0 = [a, b, c, d]
params_arr = train_WMCmodel(sample_train, params_X0, train_err_image_path, False)
logging.info("模型初值:\t{}".format(str(params_X0)))
logging.info("训练得到的模型系数:\t{}".format(str(params_arr)))
- self.__processing_paras.update({"A": params_arr[0]})
- self.__processing_paras.update({"B": params_arr[1]})
- self.__processing_paras.update({"C": params_arr[2]})
- self.__processing_paras.update({"D": params_arr[3]})
+ # self.__processing_paras.update({"A": params_arr[0]})
+ # self.__processing_paras.update({"B": params_arr[1]})
+ # self.__processing_paras.update({"C": params_arr[2]})
+ # self.__processing_paras.update({"D": params_arr[3]})
def block_process(self,start):
"""
@@ -468,12 +470,13 @@ class LeafIndexMain:
vm_path = self.__workspace_cai_SoilMoi_path + "soil_moisture" + suffix
angle_path = self.__workspace_cai_localang_path + "LocalIncidenceAngle_preproed" + suffix
v2_save_path = self.__workspace_Leafindex_path + "LeafAreaIndexProduct" + suffix
- pl.append(pool.apply_async(self.cal_leaf_index, (vm_path, tif_path, angle_path, v2_save_path, num)))
+ # pl.append(pool.apply_async(self.cal_leaf_index, (vm_path, tif_path, angle_path, v2_save_path, num)))
+ self.cal_leaf_index(vm_path, tif_path, angle_path, v2_save_path, num)
logger.info('total:%s, block:%s calculating leaf_index!', len(in_tif_paths), num)
num = num + 1
- pool.close()
- pool.join()
+ # pool.close()
+ # pool.join()
logger.info('all img cal LeafIndex success!')
logger.info('progress bar: 80%')
ref_tif_path = self.__ref_img_path
diff --git a/leafAreaIndex/LeafIndexXmlInfo.py b/leafAreaIndex/LeafIndexXmlInfo.py
index f2fa11ec..8a173cb0 100644
--- a/leafAreaIndex/LeafIndexXmlInfo.py
+++ b/leafAreaIndex/LeafIndexXmlInfo.py
@@ -6,7 +6,8 @@
@Date :2021/10/19 14:39
@Version :1.0.0
"""
-
+import glob
+import os
from xml.etree.ElementTree import ElementTree
import shutil
from tool.algorithm.image.ImageHandle import ImageHandler
@@ -232,3 +233,19 @@ class CreateStadardXmlFile:
Parameter.find("CoverThresholdMax").text = str(self.set_threshold[3])
tree.write(self.path, encoding="utf-8", xml_declaration=True)
+
+if __name__ == '__main__':
+ tif_dir = r'E:\SoilMoisture_cal\GF3BWENCANGtif'
+ out_dir = r'E:\SoilMoisture_cal\GF3_result'
+ tifs = list(glob.glob(os.path.join(tif_dir, "*.tif")))
+ for tif in tifs:
+ name = os.path.splitext(os.path.basename(tif))[0]
+ outPath = os.path.join(out_dir, name + '_lee.tif')
+
+ exe_path = r".\baseTool\x64\Release\SIMOrthoProgram.exe"
+ exe_cmd = r"set PROJ_LIB=.\baseTool\x64\Release; & {0} {1} {2} {3} {4} {5}".format(exe_path, 12, tif,
+ outPath, 7, 0.25)
+ print(exe_cmd)
+ print(os.system(exe_cmd))
+ print("==========================================================================")
+
diff --git a/leafAreaIndex/config.ini b/leafAreaIndex/config.ini
index 3d44d427..4070d6ae 100644
--- a/leafAreaIndex/config.ini
+++ b/leafAreaIndex/config.ini
@@ -4,7 +4,7 @@
# 算法名称。修改临时工作区生成临时文件的名称,日志名称;
exe_name = LeafAreaIndex
# 开启调试模式则不删除临时工作区,True:开启调试,False:不开启调试
-debug = True
+debug = False
tar = LAI
productLevel = 5
# 影像滤波尺寸。取值范围:大于1的奇数,默认值:7
diff --git a/leafAreaIndex/sample_process.py b/leafAreaIndex/sample_process.py
index f727c8b3..31462d51 100644
--- a/leafAreaIndex/sample_process.py
+++ b/leafAreaIndex/sample_process.py
@@ -1,7 +1,7 @@
#
# 样本处理的相关的库
#
-
+import logging
from tool.algorithm.image.ImageHandle import ImageHandler
import math
import numpy as np
@@ -15,7 +15,7 @@ from scipy import interpolate
from multiprocessing import pool
# 常量声明区域
imageHandler=ImageHandler()
-
+logger = logging.getLogger("mylog")
# python 的函数类
def read_sample_csv(csv_path):
@@ -202,3 +202,84 @@ def split_sample_list(sample_list,train_ratio):
return [sample_train,sample_test]
+def WMC(A,B,M,N,sample_soil,sample_inc,sample_sigma,sample_NDVI):
+ sigma_soil=M*sample_soil+N
+ theta=np.cos(sample_inc)
+ Atheta=A*theta
+ lnV=(sample_sigma-Atheta)/(sigma_soil-Atheta)
+ Mveg=-1*(theta/(2*B))*np.log(lnV)
+ # lai_ndvi=0*E*sample_NDVI+F
+ # return lai_ndvi
+ LAI=Mveg#(lai_ndvi+Mveg)/2
+ return LAI
+
+
+def WMCModel(param_arr,sample_lai,sample_soil,sample_inc,sample_sigma,sample_NDVI):
+ """ WMC模型 增加 归一化植被指数
+ Args:
+ param_arr (np.ndarray): 参数数组
+ sample_lai (double): 叶面积指数
+ sample_soil (double): 土壤含水量
+ sample_inc (double): 入射角(弧度值)
+ sample_sigma (double): 后向散射系数(线性值)
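+        sample_NDVI (double): 归一化植被指数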
+ Returns:
+ double: 方程值
+ """
+ # 映射参数,方便修改模型
+ A,B,M,N=param_arr # 在这里修改模型
+ LAI=WMC(A,B,M,N,sample_soil,sample_inc,sample_sigma,sample_NDVI)
+ result=LAI-sample_lai
+ return result
+
+
+def train_WMCmodel(lai_water_inc_sigma_list, params_X0, train_err_image_path, draw_flag=False):
+ """ 训练模型参数
+ Args:
+        lai_water_inc_sigma_list (list): 训练模型使用的样本
+ """
+
+ def f(X):
+ eqs = []
+ for lai_water_inc_sigma_item in lai_water_inc_sigma_list:
+ sample_lai = lai_water_inc_sigma_item[4]
+ sample_sigma = lai_water_inc_sigma_item[8] # 5: csv_sigma, 8:tiff_sigma
+ sample_soil = lai_water_inc_sigma_item[6]
+ sample_inc = lai_water_inc_sigma_item[7]
+ FVC = lai_water_inc_sigma_item[8]
+ sample_NDVI = lai_water_inc_sigma_item[9]
+ eqs.append(WMCModel(X, sample_lai, sample_soil, sample_inc, sample_sigma, sample_NDVI))
+ return eqs
+
+ X0 = params_X0 # 初始值
+ logger.info(str(X0))
+ h = leastsq(f, X0)
+    logger.info('{} {}'.format(h[0], h[1]))
+ err_f = f(h[0])
+ x_arr = [lai_waiter_inc_sigma_item[4] for lai_waiter_inc_sigma_item in lai_water_inc_sigma_list]
+ # 根据误差大小进行排序
+ logger.info("训练集:\n根据误差输出点序\n数量:{}\n点序\t误差值\t 样点信息".format(str(np.array(err_f).shape)))
+ for i in np.argsort(np.array(err_f)):
+ logger.info('{}\t{}\t{}'.format(i, err_f[i], str(lai_water_inc_sigma_list[i])))
+ logger.info("\n误差点序输出结束\n")
+
+ pred_lai = []
+ for i in range(len(err_f)):
+ pred_lai.append(err_f[i] + x_arr[i])
+
+ if draw_flag:
+ logger.info(err_f)
+ logger.info(np.where(np.abs(err_f) < 10))
+ from matplotlib import pyplot as plt
+ print(len(err_f), len(x_arr))
+ # plt.scatter(x_arr, pred_lai)
+ # plt.title("simulation Sigma and sample sigma")
+ # plt.xlabel("sample sigma")
+ # plt.ylabel("simulation sigma")
+ # plt.xlim(0, 10)
+ # plt.ylim(0, 10)
+ # plt.plot([0, 10], [0, 10])
+ # plt.savefig(train_err_image_path, dpi=600)
+ # plt.show()
+ return h[0]
+
+
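For reference, the train_WMCmodel/WMCModel additions above amount to fitting (A, B, M, N) with scipy's leastsq so that the water-cloud inversion reproduces the measured LAI of each sample. Below is a minimal, self-contained sketch of that fit on synthetic samples; the tuple layout (index 4 = LAI, 6 = soil moisture, 7 = incidence angle in radians, 8 = backscatter) mirrors the lists built in LeafIndexMain.py, and all numbers are illustrative, not project data.

# Minimal sketch of the water-cloud fit performed by train_WMCmodel above.
import numpy as np
from scipy.optimize import leastsq

def wmc_forward(params, lai, soil, inc):
    # Forward water-cloud model: backscatter from LAI, soil moisture and incidence angle.
    A, B, M, N = params
    sigma_soil = M * soil + N
    cos_inc = np.cos(inc)
    veg_att = np.exp(-2.0 * B * lai / cos_inc)
    return A * cos_inc * (1.0 - veg_att) + sigma_soil * veg_att

def wmc_invert(params, soil, inc, sigma):
    # Same inversion as WMC() above: solve the forward model for LAI.
    A, B, M, N = params
    sigma_soil = M * soil + N
    cos_inc = np.cos(inc)
    ratio = (sigma - A * cos_inc) / (sigma_soil - A * cos_inc)
    return -cos_inc / (2.0 * B) * np.log(ratio)

def residuals(params, samples):
    # One residual per sample: inverted LAI minus measured LAI (as in WMCModel above).
    return [wmc_invert(params, s[6], s[7], s[8]) - s[4] for s in samples]

if __name__ == '__main__':
    rng = np.random.default_rng(0)
    true = [0.10, 0.50, 0.20, 0.05]
    samples = []
    for _ in range(20):
        lai = rng.uniform(0.5, 4.0)
        soil = rng.uniform(0.1, 0.4)
        inc = np.radians(rng.uniform(25.0, 45.0))
        sigma = wmc_forward(true, lai, soil, inc)
        # tuple layout follows the lists built in LeafIndexMain.py (indices 4/6/7/8 used here)
        samples.append(('20220828', 0, 0.0, 0.0, lai, 0.0, soil, inc, sigma, 0.0))
    x0 = [0.08, 0.40, 0.15, 0.08]
    fitted, _ = leastsq(residuals, x0, args=(samples,))
    print('fitted A, B, M, N:', fitted)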
diff --git a/soilMoistureTop/SoilMoisture.xml b/soilMoistureTop/SoilMoisture.xml
index c41f41a4..a1ee72d0 100644
--- a/soilMoistureTop/SoilMoisture.xml
+++ b/soilMoistureTop/SoilMoisture.xml
@@ -36,7 +36,7 @@
File
tar.gz
Man
- F:\Tian-GF3-Wenchang\GF3_SYC_QPSI_040488_E110.7_N20.1_20240418_L1A_AHV_L10006923782-cal.tar.gz
+ F:\202306pj\GF3B_MYC_QPSI_008114_E121.6_N40.9_20230608_L1A_AHV_L10000196489-cal.tar.gz
Covering
@@ -45,7 +45,7 @@
File
tif
Man
- F:\Tian-GF3-Wenchang\landCover_Glob30.tif
+ F:\202306pj\n51_40_2020lc030.tif
CoveringIDs
@@ -54,7 +54,7 @@
Value
string
Man
- 10;20;30;40;70;90
+ 10;20;30;40;50
DEFAULT
DEFAULT
DEFAULT
@@ -66,7 +66,7 @@
File
tif
Man
- F:\Tian-GF3-Wenchang\NDVI\S2_NDVImed_SMC_GF3.tif
+ F:\202306pj\L9NDVI_GF3B_175394.tif
NDVIScope
@@ -100,7 +100,7 @@
File
tif
Man
- F:\Tian-GF3-Wenchang\NDWI\S2_NDWImed_SMC_GF3.tif
+ F:\202306pj\ndwi\S2_NDWImed_panjin.tif
e1
@@ -109,7 +109,7 @@
Value
float
Man
- -12.772025319313864
+ -42.471051034087125
100
-100
DEFAULT
@@ -121,7 +121,7 @@
Value
float
Man
- 11.555079721123748
+ -18.09398216319936
100
-100
DEFAULT
@@ -133,7 +133,7 @@
Value
float
Man
- 28.605979879379152
+ -460667.08274463506
9999
-9999
DEFAULT
@@ -145,7 +145,7 @@
Value
float
Man
- -0.04303182290305167
+ 0.16445643200517854
9999
-9999
DEFAULT
@@ -157,7 +157,7 @@
Value
float
Man
- -17.212613314078236
+ -604.983884544745
9999
-9999
DEFAULT
@@ -169,7 +169,7 @@
Value
float
Man
- 1.1965671296029239
+ 1.498634203391113
9999
-9999
DEFAULT
@@ -183,7 +183,7 @@
File
tar.gz
Man
- D:\micro\WorkSpace\SoilMoisture\Output\GF3_SYC_QPSI_040488_E110.7_N20.1_20240418_L1A_AHV_L10006923782-cal-SMC.tar.gz
+ D:\micro\WorkSpace\SoilMoisture\Output\GF3B_MYC_QPSI_008114_E121.6_N40.9_20230608_L1A_AHV_L10000196489-cal-SMC.tar.gz
DEFAULT
DEFAULT
DEFAULT
diff --git a/soilMoistureTop/SoilMoistureALg.py b/soilMoistureTop/SoilMoistureALg.py
index 931c7193..c276cc6e 100644
--- a/soilMoistureTop/SoilMoistureALg.py
+++ b/soilMoistureTop/SoilMoistureALg.py
@@ -137,8 +137,8 @@ class MoistureAlg:
return False
- mask_array = ImageHandler.get_band_array(mask_path, 1)
- soil_dielectric[np.where(mask_array == 0)] = np.nan
+ # mask_array = ImageHandler.get_band_array(mask_path, 1)
+ # soil_dielectric[np.where(mask_array == 0)] = np.nan
try:
# Dobsen模型计算土壤水分
@@ -149,8 +149,8 @@ class MoistureAlg:
5.5 * np.power(10.0, -4) * np.power(soil_dielectric, 2) + \
4.3 * np.power(10.0, -6) * np.power(soil_dielectric, 3)
# 处理异常值
- soil_moisture[where_tmp1_0] = 0
- soil_moisture[where_tmp2_0] = 0
+ # soil_moisture[where_tmp1_0] = 0
+ # soil_moisture[where_tmp2_0] = 0
except BaseException as msg:
logger.error(msg)
return False
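For context, the soil_moisture expression this hunk touches is a cubic polynomial of the soil dielectric constant. A minimal sketch follows; the two leading coefficients sit above the visible hunk and are assumed here to follow the standard Topp (1980) form, while the 5.5e-4 and 4.3e-6 terms are taken directly from the lines shown.

# Minimal sketch of the cubic dielectric-to-volumetric-moisture relation used above.
# Assumption: the two leading coefficients (outside the visible hunk) follow Topp (1980).
import numpy as np

def dielectric_to_moisture(eps):
    eps = np.asarray(eps, dtype=float)
    return (-5.3e-2
            + 2.92e-2 * eps
            - 5.5e-4 * np.power(eps, 2)
            + 4.3e-6 * np.power(eps, 3))

if __name__ == '__main__':
    print(dielectric_to_moisture([5.0, 15.0, 25.0]))  # roughly 0.08, 0.28, 0.40 m3/m3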
diff --git a/soilMoistureTop/SoilMoistureMain.py b/soilMoistureTop/SoilMoistureMain.py
index 45e15a11..09a43dee 100644
--- a/soilMoistureTop/SoilMoistureMain.py
+++ b/soilMoistureTop/SoilMoistureMain.py
@@ -379,6 +379,7 @@ class MoistureMain:
# data[data < soil_moisture_value_min] = soil_moisture_value_min
# data[data > soil_moisture_value_max] = soil_moisture_value_max
+ data = data/100
data[data < soil_moisture_value_min] = -9999
data[data > soil_moisture_value_max] = -9999
self.imageHandler.write_img(product_path, proj, geos, data, '-9999')
@@ -464,6 +465,7 @@ if __name__ == '__main__':
except Exception:
logger.exception('run-time error!')
finally:
+
main_handler.del_temp_workspace()
pass
end = datetime.datetime.now()
diff --git a/soilMoisture_geo_sar/SoilMoistureMain.py b/soilMoisture_geo_sar/SoilMoistureMain.py
index e704656a..8966efeb 100644
--- a/soilMoisture_geo_sar/SoilMoistureMain.py
+++ b/soilMoisture_geo_sar/SoilMoistureMain.py
@@ -465,7 +465,7 @@ class MoistureMain:
for i in range(1, len(paths)):
SrcImageName=SrcImageName+";"+os.path.split(paths[i])[1].split('.tar.gz')[0]
meta_xml_path = self.__product_dic+EXE_NAME+"Product.meta.xml"
- CreateMetafile(self.__processing_paras['META'], self.alg_xml_path, model_xml_path, meta_xml_path).process(SrcImageName)
+ CreateMetafile(self.__processing_paras[self.__sar_names_list[0] + '_Origin_META'], self.alg_xml_path, model_xml_path, meta_xml_path).process(SrcImageName)
# 文件夹打包
diff --git a/soilMoisture_geo_sar/SoilMoisture_geo.xml b/soilMoisture_geo_sar/SoilMoisture_geo.xml
index b15c1b02..6a159efa 100644
--- a/soilMoisture_geo_sar/SoilMoisture_geo.xml
+++ b/soilMoisture_geo_sar/SoilMoisture_geo.xml
@@ -56,8 +56,8 @@
File
tar.gz
Man
- F:\202306hb\sar_img\GF3B_SYC_QPSI_008316_E116.0_N43.0_20230622_L1A_AHV_L10000202893-cal.tar.gz;
- F:\202306hb\sar_img\GF3B_SYC_QPSI_008316_E116.1_N43.3_20230622_L1A_AHV_L10000202892-cal.tar.gz
+ F:\MicroWorkspace\Micro\soil_geo\230224deqing\GF3_SAY_QPSI_008634_E102.3_N33.7_20180331_L1A_AHV_L10003096081-cal.tar.gz;
+ F:\MicroWorkspace\Micro\soil_geo\230224deqing\GF3_SAY_QPSI_009051_E102.3_N33.7_20180429_L1A_AHV_L10003153708-cal.tar.gz
True
False
@@ -88,7 +88,7 @@
File
tif
Man
- F:\202306hb\Landcover\50T_20220101-20230101.tif
+ F:\MicroWorkspace\Micro\soil_geo\cover\LandCover.tif
True
False
File
@@ -118,7 +118,7 @@
File
tif
Man
- F:\202306hb\NDVI\S2_202306_NDVI.tif
+ F:\MicroWorkspace\Micro\soil_geo\201806NDVI\201806_NDVImax.tif
True
False
File
diff --git a/soilSalinity-Train_predict/SoilSalinityPredict.xml b/soilSalinity-Train_predict/SoilSalinityPredict.xml
index 2afe66d6..2b1d6f9f 100644
--- a/soilSalinity-Train_predict/SoilSalinityPredict.xml
+++ b/soilSalinity-Train_predict/SoilSalinityPredict.xml
@@ -68,7 +68,7 @@
File
tif
Man
- F:\Tian-GF3-Wenchang\landCover_Glob30.tif
+ F:\Tian-GF3-Wenchang\LandCover.tif
DEFAULT
DEFAULT
DEFAULT
@@ -82,7 +82,7 @@
File
zip
Man
- D:\micro\WorkSpace\SoilSalinityTrain\Output\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho-SSAA.zip
+ G:\算法备份\micro\WorkSpace\SoilSalinityTrain\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho-SSAA.zip
DEFAULT
DEFAULT
DEFAULT
@@ -96,7 +96,7 @@
Value
string
Man
- 10;20;30;40;70;90
+ 10;20;30;40;51;52;53;54;55;56;57;58;59;90
DEFAULT
DEFAULT
DEFAULT
diff --git a/soilSalinity/SoilSalinity.xml b/soilSalinity/SoilSalinity.xml
index e6117353..25642b43 100644
--- a/soilSalinity/SoilSalinity.xml
+++ b/soilSalinity/SoilSalinity.xml
@@ -38,7 +38,7 @@
File
tar.gz
Man
- F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho.tar.gz
+ F:\2024xibei\GF3C_KSC_QPSI_008440_E86.0_N44.7_20231113_L1A_AHV_L10000215825-ortho.tar.gz
DEFAULT
DEFAULT
DEFAULT
@@ -66,7 +66,7 @@
File
csv
Man
- F:\Tian-GF3-Wenchang\soilSality.csv
+ F:\2024xibei\soilty.csv
DEFAULT
DEFAULT
DEFAULT
@@ -80,7 +80,7 @@
File
tif
Man
- F:\Tian-GF3-Wenchang\landCover.tif
+ F:\2024xibei\LandaCover.tif
DEFAULT
DEFAULT
DEFAULT
@@ -94,7 +94,7 @@
Value
string
Man
- empty
+ 90
DEFAULT
DEFAULT
DEFAULT
@@ -108,7 +108,7 @@
File
tif
Man
- F:\Tian-GF3-Wenchang\NDVI\S2_NDVImed_SMC_GF3.tif
+ F:\2024xibei\S2_NDVImed2.tif
DEFAULT
DEFAULT
DEFAULT
@@ -138,7 +138,7 @@
File
tar.gz
Man
- D:\micro\WorkSpace\SoilSalinity\Output\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho-SSAA.tar.gz
+ D:\micro\WorkSpace\SoilSalinity\Output\GF3C_KSC_QPSI_008440_E86.0_N44.7_20231113_L1A_AHV_L10000215825-ortho-SSAA.tar.gz
DEFAULT
DEFAULT
DEFAULT
diff --git a/soilSalinity/SoilSalinityMain.py b/soilSalinity/SoilSalinityMain.py
index e3b3aa1e..698b3da5 100644
--- a/soilSalinity/SoilSalinityMain.py
+++ b/soilSalinity/SoilSalinityMain.py
@@ -10,6 +10,7 @@
[修改序列] [修改日期] [修改者] [修改内容]
1 2022-6-27 石海军 1.增加配置文件config.ini; 2.内部处理使用地理坐标系(4326);
"""
+import glob
import logging
import shutil
@@ -122,6 +123,7 @@ class SalinityMain:
# 元文件字典
# para_dic.update(InitPara.get_meta_dic(InitPara.get_meta_paths(file_dir, name), name))
para_dic.update(InitPara.get_meta_dic_new(InitPara.get_meta_paths(file_dir, name), name))
+ para_dic.update({'incXML': InitPara.get_incidence_xml_paths(file_dir, name)[0]})
# tif路径字典
para_dic.update(InitPara.get_polarization_mode(InitPara.get_tif_paths(file_dir, name)))
parameter_path = os.path.join(file_dir, "orth_para.txt")
@@ -220,7 +222,13 @@ class SalinityMain:
polarization = ['HH', 'HV', 'VH', 'VV']
calibration = Calibration.get_Calibration_coefficient(self.__processing_paras['Origin_META'], polarization)
tif_path = atp.calibration(calibration, in_ahv_dir=self.__workspace_preprocessed_path)
- atp.ahv_to_polsarpro_t3_soil(t3_path, tif_path)
+
+ inc_path = os.path.join(self.__workspace_processing_path, 'inc_angle.tif')
+ in_tif_paths = list(glob.glob(os.path.join(ahv_path, '*.tif')))
+ rows = ImageHandler.get_img_height(in_tif_paths[0])
+ ImageHandler.get_inc_angle(self.__processing_paras['incXML'], rows, 1, inc_path)
+
+ atp.ahv_to_polsarpro_t3_soil(t3_path, inc_path, tif_path)
logger.info('ahv transform to polsarpro T3 matrix success!')
logger.info('progress bar: 20%')
diff --git a/surfaceRoughness_oh2004/SurfaceRoughness.xml b/surfaceRoughness_oh2004/SurfaceRoughness.xml
index 9fff817a..f5bc62b3 100644
--- a/surfaceRoughness_oh2004/SurfaceRoughness.xml
+++ b/surfaceRoughness_oh2004/SurfaceRoughness.xml
@@ -38,7 +38,7 @@
File
tar.gz
Man
- F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho.tar.gz
+ F:\2024xibei\GF3B_KSC_QPSI_010328_E86.0_N44.7_20231109_L1A_AHV_L10000262135-ortho.tar.gz
True
False
File
@@ -68,7 +68,7 @@
File
tif
Man
- F:\Tian-GF3-Wenchang\landCover_Glob30.tif
+ F:\2024xibei\LandaCover.tif
True
False
File
@@ -83,7 +83,7 @@
Value
string
Man
- 10;20;30;40;70;90
+ 10;90
True
False
UploadInput
@@ -98,7 +98,7 @@
File
tif
Man
- F:\Tian-GF3-Wenchang\NDVI\S2_NDVImed_SMC_GF3.tif
+ F:\2024xibei\S2_NDVImed2.tif
True
False
File
@@ -130,7 +130,7 @@
File
tar.gz
Man
- D:\micro\WorkSpace\SurfaceRoughness\Output\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho-SR.tar.gz
+ D:\micro\WorkSpace\SurfaceRoughness\Output\GF3B_KSC_QPSI_010328_E86.0_N44.7_20231109_L1A_AHV_L10000262135-ortho-SR.tar.gz
diff --git a/surfaceRoughness_oh2004/SurfaceRoughnessTool.py b/surfaceRoughness_oh2004/SurfaceRoughnessTool.py
index d29d0d10..277069c6 100644
--- a/surfaceRoughness_oh2004/SurfaceRoughnessTool.py
+++ b/surfaceRoughness_oh2004/SurfaceRoughnessTool.py
@@ -43,7 +43,13 @@ class SoilMoistureTool:
polarization = ['HH', 'HV', 'VH', 'VV']
calibration = Calibration.get_Calibration_coefficient(self.xml_path, polarization)
tif_path = atp.calibration(calibration, in_ahv_dir=self.__workspace_processing_path)
- atp.ahv_to_polsarpro_t3_soil(t3_path, tif_path)
+
+ inc_path = self.incidence_file
+ # in_tif_paths = list(glob.glob(os.path.join(t3_path, '*.tif')))
+ # rows = ImageHandler.get_img_height(in_tif_paths[0])
+ # ImageHandler.get_inc_angle(self.xml_path, rows, 1, inc_path)
+
+ atp.ahv_to_polsarpro_t3_soil(t3_path, inc_path, tif_path)
# Lee滤波
leeFilter = LeeRefinedFilterT3()
diff --git a/tool/algorithm/algtools/MetaDataHandler.py b/tool/algorithm/algtools/MetaDataHandler.py
index 931c5b9b..8017215d 100644
--- a/tool/algorithm/algtools/MetaDataHandler.py
+++ b/tool/algorithm/algtools/MetaDataHandler.py
@@ -124,21 +124,25 @@ class Calibration:
kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
data_value = ((quality/32767)**2) * (10**((kdb/10)*-1))
calibration[0] = math.sqrt(data_value)
+ print("HH", calibration[0])
if i == 'HV':
quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
data_value = ((quality/32767)**2) * (10**((kdb/10)*-1))
calibration[1] = math.sqrt(data_value)
+ print("HV", calibration[1])
if i == 'VH':
quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
data_value = ((quality/32767)**2) * (10**((kdb/10)*-1))
calibration[2] = math.sqrt(data_value)
+ print("VH", calibration[2])
if i == 'VV':
quality = MetaDataHandler.get_QualifyValue(meta_file_path, i)
kdb = MetaDataHandler.get_Kdb(meta_file_path, i)
data_value = ((quality/32767)**2) * (10**((kdb/10)*-1))
calibration[3] = math.sqrt(data_value)
+ print("VV", calibration[3])
return calibration
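The print statements added above expose the per-polarisation calibration constant computed from the metadata QualifyValue and Kdb. A standalone sketch of that formula follows; the QualifyValue/Kdb numbers are illustrative, not read from a real GF-3 meta file.

# Minimal sketch of the calibration constant printed above:
# coef = sqrt((QualifyValue / 32767)^2 * 10^(-Kdb / 10))
import math

def calibration_coef(qualify_value, kdb):
    data_value = (qualify_value / 32767.0) ** 2 * 10.0 ** (-kdb / 10.0)
    return math.sqrt(data_value)

if __name__ == '__main__':
    # illustrative metadata values only
    for pol, (qv, kdb) in {'HH': (19000.0, 30.0), 'HV': (9000.0, 30.0)}.items():
        print(pol, calibration_coef(qv, kdb))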
diff --git a/tool/algorithm/algtools/PreProcess.py b/tool/algorithm/algtools/PreProcess.py
index b9148dba..8dcee787 100644
--- a/tool/algorithm/algtools/PreProcess.py
+++ b/tool/algorithm/algtools/PreProcess.py
@@ -27,7 +27,7 @@ from tool.algorithm.image.ImageHandle import ImageHandler
import logging
logger = logging.getLogger("mylog")
#
-# os.environ['PROJ_LIB'] = os.getcwd()
+os.environ['PROJ_LIB'] = os.getcwd()
@@ -207,7 +207,7 @@ class PreProcess:
def trans_epsg4326(out_path, in_path):
OutTile = gdal.Warp(out_path, in_path,
dstSRS='EPSG:4326',
- resampleAlg=gdalconst.GRA_Bilinear
+ resampleAlg=gdalconst.GRA_NearestNeighbour
)
OutTile = None
return True
@@ -414,7 +414,7 @@ class PreProcess:
input_dataset = gdal.Open(input_path)
- gdal.Warp(output_path, input_dataset, format='GTiff', outputBounds=box, cutlineDSName=shp_path, dstNodata=-9999)
+ gdal.Warp(output_path, input_dataset, format='GTiff', outputBounds=box, cutlineDSName=shp_path, dstNodata=0, resampleAlg='nearest')
# cutlineWhere="FIELD = ‘whatever’",
# optionally you can filter your cutline (shapefile) based on attribute values
# select the no data value you like
@@ -582,8 +582,18 @@ class PreProcess:
if __name__ == '__main__':
- fn = r"F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1B_v_h_L10006928143-ortho.tif"
- out = r"F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1B_v_h_L10006928143-ortho.tif"
- PreProcess.resample_by_gdal(fn, out)
+ # fn = r"F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1B_v_h_L10006928143-ortho.tif"
+ # out = r"F:\Tian-GF3-Wenchang\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1B_v_h_L10006928143-ortho.tif"
+ # PreProcess.resample_by_gdal(fn, out)
+ inTif = r"D:\micro\WorkSpace\VegetationPhenology\Temporary\preprocessing\20240427_Covering_cut.tif"
+ outTif = r"D:\micro\WorkSpace\VegetationPhenology\Temporary\preprocessing\20240427_Covering_cut_re.tif"
+ shpPath = r"D:\micro\test\GF3_MYC_QPSI_040611_E110.6_N20.0_20240427_L1A_AHV_L10006928143-ortho-SSAA.tif"
+ tarTif = r"D:\micro\VP_class\band_merged.tif"
+ # PreProcess.resampling_by_scale(inTif, outTif, tarTif)
+
+ print(10 * np.log10(0))
+
+
+ # PreProcess.cut_img(outTif, inTif, shpPath)
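A minimal sketch of the two gdal.Warp calls patched above (nearest-neighbour reprojection to EPSG:4326, and a shapefile cutline with dstNodata=0); all paths and the bounding box are hypothetical.

# Minimal sketch of the two gdal.Warp calls patched above; all paths are hypothetical.
from osgeo import gdal, gdalconst

def to_epsg4326(out_path, in_path):
    # Reproject to WGS84 with nearest-neighbour resampling (as in trans_epsg4326 above).
    ds = gdal.Warp(out_path, in_path,
                   dstSRS='EPSG:4326',
                   resampleAlg=gdalconst.GRA_NearestNeighbour)
    return ds is not None

def cut_by_shp(out_path, in_path, shp_path, box):
    # Clip to a bounding box and shapefile cutline, filling outside pixels with 0.
    ds = gdal.Warp(out_path, gdal.Open(in_path), format='GTiff',
                   outputBounds=box, cutlineDSName=shp_path,
                   dstNodata=0, resampleAlg='nearest')
    return ds is not None

if __name__ == '__main__':
    to_epsg4326(r'D:\tmp\cover_4326.tif', r'D:\tmp\cover.tif')
    cut_by_shp(r'D:\tmp\cover_cut.tif', r'D:\tmp\cover_4326.tif',
               r'D:\tmp\roi.shp', box=(116.0, 43.5, 116.8, 44.2))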
diff --git a/tool/algorithm/algtools/ROIAlg.py b/tool/algorithm/algtools/ROIAlg.py
index c441dcaa..c84adb4a 100644
--- a/tool/algorithm/algtools/ROIAlg.py
+++ b/tool/algorithm/algtools/ROIAlg.py
@@ -230,10 +230,13 @@ class ROIAlg:
return True
if __name__ == '__main__':
- dir = r'G:\MicroWorkspace\C-SAR\SoilMoisture\Temporary\processing/'
- out_tif_path = r'D:\micro\WorkSpace\SurfaceRoughness\Temporary\SurfaceRoughnessProduct_test.tif'
- in_tif_path = r'D:\micro\WorkSpace\SurfaceRoughness\Temporary\SurfaceRoughnessProduct_geo.tif'
- mask_path = r'D:\micro\WorkSpace\SurfaceRoughness\Temporary\processing\roi\ndvi_mask.tif'
- background_value = 0
- ROIAlg.cal_roi(out_tif_path, in_tif_path, mask_path, background_value)
+ # dir = r'G:\MicroWorkspace\C-SAR\SoilMoisture\Temporary\processing/'
+ out_tif_path = r"D:\BaiduNetdiskDownload\veg_landcover_re_mask.tif"
+ in_tif_path = r"D:\BaiduNetdiskDownload\veg_landcover_re.tif"
+ # mask_path = r'D:\micro\WorkSpace\SurfaceRoughness\Temporary\processing\roi\ndvi_mask.tif'
+ # background_value = 0
+ # ROIAlg.cal_roi(out_tif_path, in_tif_path, mask_path, background_value)
+ cover_id_list = [10, 40, 90]
+
+ ROIAlg.trans_cover2mask(out_tif_path, in_tif_path, cover_id_list)
pass
\ No newline at end of file
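The __main__ block now exercises ROIAlg.trans_cover2mask with a cover-ID list. The numpy sketch below illustrates the underlying idea (1 where a pixel's landcover ID is in the list, 0 elsewhere); it is an illustration, not the project's implementation.

# Minimal numpy sketch of turning a landcover array into a 0/1 mask for a list of cover IDs.
import numpy as np

def cover_to_mask(cover_arr, cover_id_list):
    # 1 where the pixel's class ID is in cover_id_list, 0 elsewhere.
    return np.isin(cover_arr, cover_id_list).astype(np.uint8)

if __name__ == '__main__':
    cover = np.array([[10, 20, 40],
                      [90, 50, 10]])
    print(cover_to_mask(cover, [10, 40, 90]))
    # [[1 0 1]
    #  [1 0 1]]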
diff --git a/tool/algorithm/algtools/filter/lee_Filter.py b/tool/algorithm/algtools/filter/lee_Filter.py
index 1e36e6bc..a0075cef 100644
--- a/tool/algorithm/algtools/filter/lee_Filter.py
+++ b/tool/algorithm/algtools/filter/lee_Filter.py
@@ -8,6 +8,7 @@
@Date:2021/8/30 8:42
@Version:1.0.0
"""
+import glob
import numpy as np
import math
@@ -296,6 +297,20 @@ if __name__ == '__main__':
# f = Filter()
# f.lee_filter(path,path,3)
#示例2:
+ tif_dir = r''
+ out_dir = r''
+ tifs = list(glob.glob(os.path.join(tif_dir, "*.tif")))
+ for tif in tifs:
+ name = os.path.splitext(os.path.basename(tif))[0]
+ outPath = os.path.join(out_dir, name)
+
+ exe_path = r".\baseTool\x64\Release\SIMOrthoProgram.exe"
+ exe_cmd = r"set PROJ_LIB=.\baseTool\x64\Release; & {0} {1} {2} {3} {4} {5}".format(exe_path, 12, tif,
+ outPath, 7, 0.25)
+ print(exe_cmd)
+ print(os.system(exe_cmd))
+ print("==========================================================================")
+
f = Filter()
f.lee_filter_block_multiprocess('I:\preprocessed\HH.tif','I:\preprocessed\HHf.tif')
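The loop added above shells out to SIMOrthoProgram.exe for filtering each GeoTIFF. For reference only, a textbook single-band Lee filter sketch in numpy/scipy follows; it is not SIMOrthoProgram's algorithm.

# Minimal single-band Lee filter sketch (textbook additive-noise form), given only to
# illustrate the filtering step performed externally above.
import numpy as np
from scipy.ndimage import uniform_filter

def lee_filter(img, size=7, noise_var=None):
    img = img.astype(np.float64)
    mean = uniform_filter(img, size)
    mean_sq = uniform_filter(img * img, size)
    var = mean_sq - mean * mean
    if noise_var is None:
        noise_var = np.nanmean(var)           # crude global noise estimate
    weight = var / (var + noise_var + 1e-12)  # ~0 in flat areas, ~1 on strong edges
    return mean + weight * (img - mean)

if __name__ == '__main__':
    noisy = np.random.default_rng(0).normal(1.0, 0.3, (64, 64))
    print(lee_filter(noisy, size=7).shape)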
diff --git a/tool/algorithm/image/ImageHandle.py b/tool/algorithm/image/ImageHandle.py
index b81e4a6f..898724ee 100644
--- a/tool/algorithm/image/ImageHandle.py
+++ b/tool/algorithm/image/ImageHandle.py
@@ -928,7 +928,13 @@ if __name__ == '__main__':
# s = ImageHandler().band_merge(path, path2, path3)
# print(s)
# pass
- fn = r"D:\BaiduNetdiskDownload\植被物候\chen_features_warp.tif"
- outP = r'D:\BaiduNetdiskDownload\植被物候\chen_features_warp_LWZ.tif'
- im_proj, im_geotrans, im_arr = ImageHandler.read_img(fn)
- ImageHandler.write_img(outP, im_proj, im_geotrans, im_arr)
+ fileDir = r'D:\micro\WorkSpace\ortho\Temporary\VH_db'
+ outDir = r'D:\micro\WorkSpace\ortho\Temporary\VH_db\test'
+ files = os.listdir(fileDir)
+ for file in files:
+ tifFile = os.path.join(fileDir, file)
+ outFile = os.path.join(outDir, file)
+ im_proj, im_geotrans, im_arr = ImageHandler.read_img(tifFile)
+ im_arr[np.isnan(im_arr)] = 0
+ im_arr[np.isinf(im_arr)] = 0
+ ImageHandler.write_img(outFile, im_proj, im_geotrans, im_arr, '0')
diff --git a/tool/algorithm/ml/machineLearning.py b/tool/algorithm/ml/machineLearning.py
index 65ab9269..55e784a6 100644
--- a/tool/algorithm/ml/machineLearning.py
+++ b/tool/algorithm/ml/machineLearning.py
@@ -180,7 +180,7 @@ class MachineLeaning:
# features_array = ImageHandler.get_data(path)
band = ImageHandler.get_bands(path)
if band == 1:
- features_array = np.zeros((1, 1024, 1024), dtype=float)
+ features_array = np.zeros((1, block_size, block_size), dtype=float)
feature_array = ImageHandler.get_data(path)
features_array[0, :, :] = feature_array
else:
diff --git a/tool/algorithm/polsarpro/AHVToPolsarpro.py b/tool/algorithm/polsarpro/AHVToPolsarpro.py
index 6e595a51..207744e8 100644
--- a/tool/algorithm/polsarpro/AHVToPolsarpro.py
+++ b/tool/algorithm/polsarpro/AHVToPolsarpro.py
@@ -72,13 +72,16 @@ class AHVToPolsarpro:
return s11, s12, s21, s22
@staticmethod
- def __ahv_to_s2_soil(ahv_dir):
+ def __ahv_to_s2_soil(ahv_dir, inc_angle):
"""
全极化影像转S2矩阵
:param ahv_dir: 全极化影像文件夹路径
:return: 极化散射矩阵S2
"""
global s11
+ _, _, inc_arr = ImageHandler().read_img(inc_angle)
+ rad_arr = np.radians(inc_arr)
+ sin_arr = np.sin(rad_arr)
in_tif_paths = list(glob.glob(os.path.join(ahv_dir, '*.tif')))
in_tif_paths1 = list(glob.glob(os.path.join(ahv_dir, '*.tiff')))
in_tif_paths += in_tif_paths1
@@ -93,22 +96,22 @@ class AHVToPolsarpro:
if 'HH' in os.path.basename(in_tif_path):
data_real = data[0, :, :]
data_imag = data[1, :, :]
- s11 = data_real + 1j * data_imag
+ s11 = data_real + 1j * data_imag / sin_arr
flag_list[0] = 1
elif 'HV' in os.path.basename(in_tif_path):
data_real = data[0, :, :]
data_imag = data[1, :, :]
- s12 = data_real + 1j * data_imag
+ s12 = data_real + 1j * data_imag / sin_arr
flag_list[1] = 1
elif 'VH' in os.path.basename(in_tif_path):
data_real = data[0, :, :]
data_imag = data[1, :, :]
- s21 = data_real + 1j * data_imag
+ s21 = data_real + 1j * data_imag / sin_arr
flag_list[2] = 1
elif 'VV' in os.path.basename(in_tif_path):
data_real = data[0, :, :]
data_imag = data[1, :, :]
- s22 = data_real + 1j * data_imag
+ s22 = data_real + 1j * data_imag / sin_arr
flag_list[3] = 1
else:
continue
@@ -380,10 +383,10 @@ class AHVToPolsarpro:
self.__t3_to_polsarpro_t3(out_file_dir, t11, t12, t13, t22, t23, t33)
- def ahv_to_polsarpro_t3_soil(self, out_file_dir, in_ahv_dir=''):
+ def ahv_to_polsarpro_t3_soil(self, out_file_dir, inc_path, in_ahv_dir=''):
if self._hh_hv_vh_vv_path_list == [] :
- s11, s12, s21, s22 = self.__ahv_to_s2_soil(in_ahv_dir)
+ s11, s12, s21, s22 = self.__ahv_to_s2_soil(in_ahv_dir, inc_path)
else:
s11, s12, s21, s22 = self.__ahv_to_s2_list_2(self._hh_hv_vh_vv_path_list)
diff --git a/tool/algorithm/polsarpro/pspSurfaceInversion.py b/tool/algorithm/polsarpro/pspSurfaceInversion.py
index b8463a2f..f951edfe 100644
--- a/tool/algorithm/polsarpro/pspSurfaceInversion.py
+++ b/tool/algorithm/polsarpro/pspSurfaceInversion.py
@@ -343,8 +343,8 @@ class SurfaceInversionOh2004:
Sub_Nlig = row
Sub_Ncol = col
dataFormat = 'T3'
- threshold_mv = 1.0
- threshold_s = 7.0
+ threshold_mv = 0.15
+ threshold_s = 2.5
para_list = [
exePath,
diff --git a/tool/algorithm/transforml1a/transHandle.py b/tool/algorithm/transforml1a/transHandle.py
index 6e5e0634..dfa5f827 100644
--- a/tool/algorithm/transforml1a/transHandle.py
+++ b/tool/algorithm/transforml1a/transHandle.py
@@ -989,7 +989,11 @@ class TransImgL1A_ori:
source_def = geometry.SwathDefinition(lons=lon_data, lats=lat_data)
lalo_step = [pixel_delta_x, -pixel_delta_y]
radius = TransImgL1A.get_radius_of_influence(lalo_step, src_meta='radar2geo')
- geo_produc = pr.bilinear.NumpyBilinearResampler(source_def, target_def, neighbours=32, radius_of_influence=radius, epsilon=0).resample(l1a_produc)
+ # geo_produc = pr.bilinear.NumpyBilinearResampler(source_def, target_def, neighbours=32, radius_of_influence=radius, epsilon=0).resample(l1a_produc)
+ geo_produc = pr.bilinear.resample_bilinear(l1a_produc, source_def, target_def,
+ radius=radius, neighbours=32,
+ nprocs=80, fill_value=0,
+ epsilon=0)
# geo_produc = pr.bilinear.resample_bilinear(l1a_produc, source_def, target_def,
# radius=radius, neighbours=32,
# nprocs=8, fill_value=np.nan,
@@ -1064,9 +1068,9 @@ if __name__ == '__main__':
#
# tr = TransImgL1A(ori_sim_path,roi_Extend)
# tr.l1a_2_geo("I:/cut.tif", "I:/salinity.tif", "I:/salinity_geo2.tif")
- ori_sim = r"D:\micro\WorkSpace\AtmosphericDelay\Temporary\cut\ori_sim.tif"
- product_tif = r"D:\micro\LWork\AtmosphericDelay\test\pro_m_filt_fine_ERA5.tif"
- result = r"D:\micro\LWork\AtmosphericDelay\test\pro_m_filt_fine_ERA5_geo.tif"
+ ori_sim = r"D:\micro\LWork\AtmosphericDelay\Temporary\cut\ori_sim.tif"
+ product_tif = r"D:\micro\LWork\AtmosphericDelay\Temporary\apsWork\master_ztd\dlosout.tiff"
+ result = r"D:\micro\LWork\AtmosphericDelay\Temporary\apsWork\master_ztd\dlosout_geo.tiff"
method = 'linear'
scopes = ImageHandle.ImageHandler().get_scopes(ori_sim)
# """
diff --git a/vegetationPhenology/SurfaceRoughnessMain.spec b/vegetationPhenology/SurfaceRoughnessMain.spec
deleted file mode 100644
index fe53ee34..00000000
--- a/vegetationPhenology/SurfaceRoughnessMain.spec
+++ /dev/null
@@ -1,33 +0,0 @@
-# -*- mode: python ; coding: utf-8 -*-
-
-block_cipher = None
-
-
-a = Analysis(['SurfaceRoughnessMain.py'],
- pathex=['D:\\Project\\microproduct\\vegetationPhenology'],
- binaries=[],
- datas=[('E:/soft/Anaconda/envs/micro/Lib/site-packages/dask/dask.yaml', './dask'), ('E:/soft/Anaconda/envs/micro/Lib/site-packages/distributed/distributed.yaml', './distributed')],
- hiddenimports=[],
- hookspath=[],
- runtime_hooks=[],
- excludes=[],
- win_no_prefer_redirects=False,
- win_private_assemblies=False,
- cipher=block_cipher,
- noarchive=False)
-pyz = PYZ(a.pure, a.zipped_data,
- cipher=block_cipher)
-exe = EXE(pyz,
- a.scripts,
- a.binaries,
- a.zipfiles,
- a.datas,
- [],
- name='SurfaceRoughnessMain',
- debug=False,
- bootloader_ignore_signals=False,
- strip=False,
- upx=True,
- upx_exclude=[],
- runtime_tmpdir=None,
- console=True )
diff --git a/vegetationPhenology/VegetationPhenology.xml b/vegetationPhenology/VegetationPhenology.xml
deleted file mode 100644
index db6ffd33..00000000
--- a/vegetationPhenology/VegetationPhenology.xml
+++ /dev/null
@@ -1,143 +0,0 @@
-
-
- CSAR_202107275419_0001-0
- D:\micro\WorkSpace\
-
- File
- ElementAlg
- VegetationPhenology-C-SAR-V2.1
- VegetationPhenology-C-SAR-V2.1.exe
- 植被物候产品
- 微波卫星3-5级产品生产模型
- 2.1
- 植被类产品_植被物候产品
- 5
- VegetationPhenology_中科卫星应用德清研究院_2.1
- 中科卫星应用德清研究院
- 景-算法
- 1.8
- python
-
- 0
- 0
- Windows10F
- 双核
- 4GB
- 2GB
- 无需求
- 无需求
- 无需求
-
-
-
-
- AHVS
- 全极化影像集
- 多景全极化影像,每一个物候期至少对应一景影像,多个路径用';'间隔
- File
- tar.gz
- Man
-
- F:\VegetationPhenology-likun\rusuoces\GF3C_MYC_QPSI_006270_E100.4_N27.0_20230615_L1A_AHV_L10000158764-ortho.tar.gz
-
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
-
-
- MeasuredData
- 植被物候类型数据
- 植被物候类型数据
- File
- csv
- Man
- D:\BaiduNetdiskDownload\植被物候\vegetationRoi_new.csv
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
-
-
- Covering
- 地表覆盖类型数据
- 经过地理定标(WGS84)的地表覆盖类型数据
- File
- tif
- Man
- F:\VegetationPhenology-likun\rusuoces\N47_25_2020LC030\n47_25_2020lc030.tif
- True
- False
- File
- Aux
- 1
- Aux
-
-
- CoveringIDs
- 地表覆盖类型数据的类别id
- 地表覆盖类型数据中植被区域、裸土区域的类别id,多个id使用“;”分割。示例:GlobeLand30数据的cover_roi_ids = 10;20;30;40;50;70;71;72;83;74;90
- Value
- string
- Man
- 10
- True
- False
- UploadInput
- Aux
- 0
- Aux
-
-
- box
- 经纬度包围盒
- 经纬度包围盒SNWE。例子31.527611633687169;31.397153571335469;118.94152;119.02 new不填写则使用影像相交区域作为包围盒38.171;38.1878;114.36567;114.39168
- Value
- string
- Man
- empty
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
-
-
- FeatureCombination
- 极化特征组合
- 可选极化特征组合一、共10种特征(编号依次为0-9)
- Freeman:表面散射p_s(0)、偶次散射p_d(1)、体散射p_v(2);
- Yamaguchi:表面散射f_s(3)、二次散射f_d(4)、体散射f_v(5)、螺旋体散射f_h(6);
- Cloude-Pottier:分解散射熵H(7)、反熵A(8)、平均散射角α(9)
- Value
- string
- Man
- 0,1,2,3,4,5,6,7,8,9
- True
- True
- UploadInput
- Aux
- 0
- Aux
-
-
-
-
- VegetationPhenologyProduct
- 植被物候产品
- 植被物候产品
- File
- tar.gz
- Man
- D:\micro\WorkSpace\VegetationPhenology\Output\GF3C_MYC_QPSI_006270_E100.4_N27.0_20230615_L1A_AHV_L10000158764-ortho-VP.tar.gz
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
-
-
-
-
\ No newline at end of file
diff --git a/vegetationPhenology/VegetationPhenologyAuxData.py b/vegetationPhenology/VegetationPhenologyAuxData.py
index f381b46a..6dc9cff2 100644
--- a/vegetationPhenology/VegetationPhenologyAuxData.py
+++ b/vegetationPhenology/VegetationPhenologyAuxData.py
@@ -283,7 +283,8 @@ class PhenoloyMeasCsv_geo:
n = 1
train_data_list = []
for data in meas_data:
-
+ if len(data)==0:
+ continue
for d in data:
if d == '':
raise Exception('there are empty data!', data)
@@ -319,22 +320,22 @@ class PhenoloyMeasCsv_geo:
train_data[3] = train_data[3] + self.__render(poly)
if train_data[3] == [] :
pass
- # raise Exception('there are empty data!', train_data)
- #
- # if len(train_data_list) <= 1:
- # raise Exception('there is only one label type!', train_data_list)
- #
- # num_list = []
- # for train_data in train_data_list:
- # if not len(train_data[3]) == 0:
- # num_list.append(len(train_data[3]))
- # max_num = np.min(num_list)
- # for train_data in train_data_list:
- # logger.info(str(train_data[0]) + "," + str(train_data[2]) +"," + "num:" + str(len(train_data[3])))
- # # max_num = self.__max_tran__num_per_class
- # logger.info("max number =" + str(max_num) +", random select"+str(max_num)+" point as train data!")
- # if(len(train_data[3]) > max_num):
- # train_data[3] = random.sample(train_data[3], max_num)
+ raise Exception('there are empty data!', train_data)
+
+ if len(train_data_list) <= 1:
+ raise Exception('there is only one label type!', train_data_list)
+
+ num_list = []
+ for train_data in train_data_list:
+ if not len(train_data[3]) == 0:
+ num_list.append(len(train_data[3]))
+ max_num = np.min(num_list)
+ for train_data in train_data_list:
+ logger.info(str(train_data[0]) + "," + str(train_data[2]) +"," + "num:" + str(len(train_data[3])))
+ # max_num = self.__max_tran__num_per_class
+ logger.info("max number =" + str(max_num) +", random select"+str(max_num)+" point as train data!")
+ if(len(train_data[3]) > max_num):
+ train_data[3] = random.sample(train_data[3], max_num)
return train_data_list
@@ -381,7 +382,7 @@ class PhenoloyMeasCsv_geo:
:return pointList: 保存多边形的list
"""
pointList = []
- strContent = roiStr.replace("POLYGON", "")
+ strContent = roiStr.replace("POLYGON", "").replace(", ", ",")
# 解析轮廓字符串为二维数组
bracketsList = []
strTemp = ''
diff --git a/vegetationPhenology/VegetationPhenologyMain.py b/vegetationPhenology/VegetationPhenologyMain.py
index 32d5faae..92ba439e 100644
--- a/vegetationPhenology/VegetationPhenologyMain.py
+++ b/vegetationPhenology/VegetationPhenologyMain.py
@@ -410,7 +410,7 @@ class PhenologyMain:
logger.info("generate train and test set success!")
logger.info('progress bar: 30%')
- optimal_X_train, optimal_Y_train, optimal_feature = ml.sel_optimal_feature(X_train, Y_train, total_name_list,important_threshold=0.0, correlation_threshold=20)
+ optimal_X_train, optimal_Y_train, optimal_feature = ml.sel_optimal_feature(X_train, Y_train, total_name_list,important_threshold=0.07, correlation_threshold=0.85)
# RF
clf = ml.trainRF(optimal_X_train, optimal_Y_train)
@@ -629,6 +629,7 @@ if __name__ == '__main__':
logger.exception("run-time error!")
finally:
main_handler.del_temp_workspace()
+ pass
end = datetime.datetime.now()
msg = 'running use time: %s ' % (end - start)
diff --git a/vegetationPhenology/VegetationPhenology_C_SAR_V3.xml b/vegetationPhenology/VegetationPhenology_C_SAR_V3.xml
deleted file mode 100644
index 6a9033f1..00000000
--- a/vegetationPhenology/VegetationPhenology_C_SAR_V3.xml
+++ /dev/null
@@ -1,184 +0,0 @@
-
-
- CSAR_202107275419_0001-0
- D:\micro\WorkSpace\
-
- File
- ElementAlg
- VegetationPhenology_C_SAR_V3
- VegetationPhenology_C_SAR_V3.exe
- 植被物候产品
- 微波卫星3-5级产品生产模型
- 3
- 植被类产品_植被物候产品
- VegetationPhenology_C_SAR_V3
- 5
- VegetationPhenology_CSAR_中科卫星应用德清研究院_3
- 中科卫星应用德清研究院
- 景-算法
- 1.8
- python
-
- 0
- 0
- Windows10
- 双核
- 32GB
- 64GB
- 无需求
-
- 无需求
- 无需求
-
-
-
-
- AHVS
- 全极化影像集
- 多景全极化影像,每一个物候期至少对应一景影像,多个路径用';'间隔
- File
- tar.gz
- Cal
-
- E:\MicroWorkspace\GF3A_nanjing\input-ortho\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho.tar.gz;
- E:\MicroWorkspace\GF3A_nanjing\input-ortho\GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_AHV_L10003923848-ortho.tar.gz
- True
- False
- File
- production
- 2
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
- Ortho_CSAR_中科卫星应用德清研究院_3
-
-
- MeasuredData
- 植被物候类型数据
- 植被物候类型数据
- File
- csv
- Cal
-
- E:\MicroWorkspace\GF3A_nanjing\VegetationPhenologyMeasureData_E118.9_N31.4_37_5.csv
- True
- False
- UploadTable
- production
- 1
- AUX
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
-
-
- Covering
- 地表覆盖类型数据
- 经过地理定标(WGS84)的地表覆盖类型数据
- File
- tif
- DEFAULT
- DEFAULT
- DEFAULT
- Cal
- E:\202306pj\n51_40_2020lc030.tif
- True
- False
- UploadTable
- Aux
- 1
- Aux
-
-
- CoveringIDs
- 地表覆盖类型数据的类别id
- 地表覆盖类型数据中植被区域、裸土区域的类别id,多个id使用“;”分割。示例:GlobeLand30数据的cover_roi_ids =
- 10;20;30;40;50;70;71;72;83;74;90
- Value
- string
- DEFAULT
- DEFAULT
- DEFAULT
- Man
- 10;20;30;40;50;60;70;71;72;80;83;74;90;100
- True
- False
- UploadInput
- Aux
- 0
- Aux
-
-
- box
- 经纬度包围盒
- 经纬度包围盒SNWE。例子31.527611633687169;31.397153571335469;118.94152;119.02
- new不填写则使用影像相交区域作为包围盒38.171;38.1878;114.36567;114.39168
- Value
- string
- Man
- DEFAULT
- DEFAULT
- DEFAULT
- empty
- True
- False
- UploadInput
- Aux
- 0
- Aux
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
-
-
- FeatureCombination
- 极化特征组合
- 可选极化特征组合一、共10种特征(编号依次为0-9)
- Freeman:表面散射p_s(0)、偶次散射p_d(1)、体散射p_v(2);
- Yamaguchi:表面散射f_s(3)、二次散射f_d(4)、体散射f_v(5)、螺旋体散射f_h(6);
- Cloude-Pottier:分解散射熵H(7)、反熵A(8)、平均散射角α(9)
- Value
- string
- DEFAULT
- DEFAULT
- DEFAULT
- Man
- 0,1,2,7,8,9,10
- True
- False
- UploadInput
- Aux
- 0
- Aux
- True
- True
- UploadInput
- Aux
- 0
- Aux
-
-
-
-
- VegetationPhenologyProduct
- 植被物候产品
- 植被物候产品
- File
- tar.gz
- Man
-
- D:\micro\WorkSpace\VegetationPhenology\Output\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho-VP.tar.gz
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
- DEFAULT
-
-
-
-
\ No newline at end of file
diff --git a/vegetationPhenology/testxmlreading.py b/vegetationPhenology/testxmlreading.py
index 4b455f95..18e76790 100644
--- a/vegetationPhenology/testxmlreading.py
+++ b/vegetationPhenology/testxmlreading.py
@@ -51,12 +51,12 @@ def createcsv_roi_polygon(coordinates):
return polygon_str
if __name__ == '__main__':
- xmlpath = r"F:\MicroWorkspace\20240814tw\shp\20240819\landCover.xml"
+ xmlpath = r"G:\地表覆盖特征图-训练-吴学箫\2024010_地物分类_feature_POL\hecheng\ROI\rengonggouxuan_3.xml"
tree_obj = ET.parse(xmlpath)
- # csv_header = ['sar_img_name', 'phenology_id', 'phenology_name', 'roi_polygon']
- csv_header = ['parent_id', 'id', 'covernm', 'roi_polygon']
- csvpath = r"F:\MicroWorkspace\20240814tw\shp\20240819\LandCover.csv"
+ csv_header = ['sar_img_name', 'phenology_id', 'phenology_name', 'roi_polygon']
+ # csv_header = ['parent_id', 'id', 'covernm', 'roi_polygon']
+ csvpath = r"G:\地表覆盖特征图-训练-吴学箫\2024010_地物分类_feature_POL\hecheng\ROI\rengonggouxuan_3.csv"
# csvcreateTitile(csvpath,csv_header)
csvfile(csvpath,csv_header)
# 得到所有匹配Region 标签的Element对象的list集合