diff --git a/.idea/misc.xml b/.idea/misc.xml
index 70a0a935..abf6bf18 100644
--- a/.idea/misc.xml
+++ b/.idea/misc.xml
@@ -1,4 +1,7 @@
+
+
+
\ No newline at end of file
diff --git a/Ortho/OrthOne.spec b/Ortho/OrthOne.spec
deleted file mode 100644
index 925f5ec9..00000000
--- a/Ortho/OrthOne.spec
+++ /dev/null
@@ -1,49 +0,0 @@
-# -*- mode: python ; coding: utf-8 -*-
-
-
-block_cipher = None
-
-
-a = Analysis(
- ['OrthoMain.py'],
- pathex=['.'],
- binaries=[],
- datas=[],
- hiddenimports=[],
- hookspath=[],
- runtime_hooks=[],
- excludes=[],
- win_no_prefer_redirects=False,
- win_private_assemblies=False,
- cipher=block_cipher,
- noarchive=False,
-)
-pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)
-
-exe = EXE(
- pyz,
- a.scripts,
- [],
- exclude_binaries=True,
- name='OrthOne',
- debug=False,
- bootloader_ignore_signals=False,
- strip=False,
- upx=True,
- console=True,
- disable_windowed_traceback=False,
- argv_emulation=False,
- target_arch=None,
- codesign_identity=None,
- entitlements_file=None,
-)
-coll = COLLECT(
- exe,
- a.binaries,
- a.zipfiles,
- a.datas,
- strip=False,
- upx=True,
- upx_exclude=[],
- name='OrthOne',
-)
diff --git a/Ortho/Ortho.xml b/Ortho/Ortho.xml
index ed1ab405..9332773f 100644
--- a/Ortho/Ortho.xml
+++ b/Ortho/Ortho.xml
@@ -45,7 +45,7 @@
File
tar.gz
Cal
- F:\MicroWorkspace\def_GF3\GF3_KSC_FSII_036108_E120.1_N31.0_20230619_L1A_VHVV_L10006793347.tar.gz
+ F:\MicroWorkspace\原老师新增干涉数据20240413\GF3_SYC_FSII_036654_E120.0_N30.7_20230727_L1A_HHHV_L10006808388.tar.gz
True
False
File
@@ -92,7 +92,7 @@
File
tar.gz
Cal
- D:\micro\WorkSpace\ortho\Output\GF3_KSC_FSII_036108_E120.1_N31.0_20230619_L1A_VHVV_L10006793347-ortho.tar.gz
+ D:\micro\WorkSpace\ortho\Output\GF3_SYC_FSII_036654_E120.0_N30.7_20230727_L1A_HHHV_L10006808388-ortho.tar.gz
DEFAULT
DEFAULT
DEFAULT
diff --git a/Ortho/OrthoMain.py b/Ortho/OrthoMain.py
index 74cd0d42..6dce9eac 100644
--- a/Ortho/OrthoMain.py
+++ b/Ortho/OrthoMain.py
@@ -384,7 +384,7 @@ class OrthoMain:
if CorrectMethod.get('CorrectMethod') == '1' or CorrectMethod.get('CorrectMethod') == 1:
logger.info("CorrectMethod is RPC!")
- return self.RPC_process_handle()
+ return self.RD_process_handle()
elif CorrectMethod.get('CorrectMethod') == '2' or CorrectMethod.get('CorrectMethod') == 2:
logger.info("CorrectMethod is RD!")
@@ -393,8 +393,6 @@ class OrthoMain:
else:
raise Exception('No CorrectMethod')
-
-
def RPC_process_handle(self):
logger.info(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'))
@@ -662,6 +660,8 @@ class OrthoMain:
if file_type in ["xml"]:
output = os.path.join(self.__workspace_package_path, filename)
shutil.copy(apath, output)
+ elif 'lin' in filename:
+ continue
else:
output=os.path.join(self.__workspace_package_path, filename)
shutil.copy(apath, output)
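Note on the hunk above: a minimal, hypothetical stand-alone sketch of the packaging filter it introduces (helper name and directory arguments are assumptions) — XML metadata is still copied, anything whose filename contains 'lin' is now skipped, and everything else is copied unchanged:

```python
import os
import shutil

def package_outputs(src_dir, package_dir):
    """Copy products into the package folder, skipping intermediate 'lin' files."""
    os.makedirs(package_dir, exist_ok=True)
    for filename in os.listdir(src_dir):
        apath = os.path.join(src_dir, filename)
        file_type = os.path.splitext(filename)[1].lstrip('.').lower()
        if file_type in ["xml"]:
            shutil.copy(apath, os.path.join(package_dir, filename))
        elif 'lin' in filename:   # look-up tables / intermediate 'lin' files are not packaged
            continue
        else:
            shutil.copy(apath, os.path.join(package_dir, filename))
```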
diff --git a/Ortho/OrthoMain.spec b/Ortho/OrthoMain.spec
index af31838c..2148d46a 100644
--- a/Ortho/OrthoMain.spec
+++ b/Ortho/OrthoMain.spec
@@ -1,6 +1,29 @@
# -*- mode: python ; coding: utf-8 -*-
+import sys
+from shutil import copy, rmtree
+import os
+cwdpath = os.getcwd()
+toolDir = os.path.join(cwdpath, 'tool')
+if os.path.exists(toolDir):
+ rmtree(toolDir) # os.remove() cannot delete a directory; clear the stale tool dir recursively
+os.mkdir(toolDir)
+source_folder = '../tool'
+
+def copy_file(path_read, path_write):
+ names = os.listdir(path_read)
+ for name in names:
+ path_read_new = os.path.join(path_read, name)
+ path_write_new = os.path.join(path_write, name)
+ if os.path.isdir(path_read_new):
+ if not os.path.exists(path_write_new):
+ os.mkdir(path_write_new)
+ copy_file(path_read_new, path_write_new)
+ else:
+ copy(path_read_new, path_write_new)
+
+copy_file(source_folder, toolDir)
block_cipher = None
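For the recursive copy_file helper added above, an equivalent sketch using only the standard library (assuming Python 3.8+ and the same '../tool' layout) would be:

```python
import os
import shutil

source_folder = '../tool'                     # same relative layout assumed by the spec
tool_dir = os.path.join(os.getcwd(), 'tool')

# Rebuild the bundled tool directory before PyInstaller's Analysis step runs.
if os.path.exists(tool_dir):
    shutil.rmtree(tool_dir)                   # clear any stale copy first
shutil.copytree(source_folder, tool_dir)
```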
diff --git a/atmosphericDelay-C-SAR/AtmosphericDelay.xml b/atmosphericDelay-C-SAR/AtmosphericDelay.xml
index c67e3e34..5fb0ed9a 100644
--- a/atmosphericDelay-C-SAR/AtmosphericDelay.xml
+++ b/atmosphericDelay-C-SAR/AtmosphericDelay.xml
@@ -1,7 +1,7 @@
CSAR_202107275419_0001-0
- E:\Result_GF3\
+ D:\micro\WorkSpace\
File
ElementAlg
@@ -9,20 +9,20 @@
AtmosphericDelay-C-SAR-V2.2.exe
大气延迟校正产品
微波卫星3-5级产品生产模型
- AtmosphericDelay-C-SAR-V2.2-1
+ AtmosphericDelay-C-SAR-V2.2-1
2.2
辐射类产品_大气延迟校正
5
- AtmosphericDelay_中科卫星应用德清研究院_2.2
+ AtmosphericDelay_中科卫星应用德清研究院_2.2
中科卫星应用德清研究院
景-算法
-
+
-
+
1.8
python
-
+
0
0
Windows10
@@ -33,68 +33,71 @@
无需求
无需求
-
+
-
- MasterSarData
- 主影像
- 经过几何校正和地形校正的SAR影像产品
- File
- tar.gz
- Man
- E:\GF3Data\AtmophericDealy\GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686.tar.gz
- True
- False
- File
- Satellite
- 1
- S1A
-
- AuxiliarySarData
- 辅影像
- 经过几何校正和地形校正的SAR影像产品
+ MainImg
+ 主影像时间
+ 哨兵数据主影像的时间,来源于数据名称
+ Value
+ string
+ Man
+ 20161129
+ True
+ False
+ UploadInput
+ Aux
+ 0
+ Aux
+
+
+ SARS
+ SAR影像文件夹路径
+ 哨兵1号数据存放的文件夹
File
tar.gz
- Man
- E:\GF3Data\AtmophericDealy\GF3_KAS_FSI_002034_E113.4_N34.7_20161228_L1A_HHHV_L10002077539.tar.gz
- True
- False
- File
+ Man
+
+ G:\辅助数据\GF3Data\AtmophericDealy\GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686.tar.gz;
+ G:\辅助数据\GF3Data\AtmophericDealy\GF3_KAS_FSI_002034_E113.4_N34.7_20161228_L1A_HHHV_L10002077539.tar.gz
+
+ True
+ False
+ File
Satellite
- 1
+ 0
S1A
-
-
+
+
MasterNC
主影像气象数据
当DataType:nc时,ParaValue处填写下载的ERA文件的路径,且气象数据须包含变量:longitude、latitude、
- pressure_level、time、Geopotential、Relative humidity、Temperature;当DataType:file时,ParaValue处
- 填写文件夹路径,且文件夹中需包含三个tif格式的气象参数:相对湿度、温度和位势能
+ pressure_level、time、Geopotential、Relative humidity、Temperature;当DataType:file时,ParaValue处
+ 填写文件夹路径,且文件夹中需包含三个tif格式的气象参数:相对湿度、温度和位势能
File
zip
- Man
- E:\GF3Data\AtmophericDealy\CASR_atmo_ERA5_N33_N36_E112_E115_20161228_22.zip
- True
- False
- File
+ Man
+ G:\辅助数据\GF3Data\AtmophericDealy\CASR_atmo_ERA5_N33_N36_E112_E115_20161228_22.zip
+ True
+ False
+ File
Aux
1
Aux
-
-
+
+
AuxiliaryNC
辅影像气象数据
当DataType:nc时,ParaValue处填写下载的ERA文件的路径,且气象数据须包含变量:longitude、latitude、
- pressure_level、time、Geopotential、Relative humidity、Temperature;当DataType:file时,ParaValue处
- 填写文件夹路径,且文件夹中需包含三个tif格式的气象参数:相对湿度、温度和位势能
+ pressure_level、time、Geopotential、Relative humidity、Temperature;当DataType:file时,ParaValue处
+ 填写文件夹路径,且文件夹中需包含三个tif格式的气象参数:相对湿度、温度和位势能
File
zip
- Man
- E:\GF3Data\AtmophericDealy\CASR_atmo_ERA5_N33_N36_E112_E115_20161129_22.zip
- True
- False
- File
+ Man
+ G:\辅助数据\GF3Data\AtmophericDealy\CASR_atmo_ERA5_N33_N36_E112_E115_20161129_22.zip
+ True
+ False
+ File
Aux
1
Aux
@@ -105,30 +108,34 @@
高程数据数据。数据来源:30米 ASTGTM2, 数据格式:tif。备注:数据的经纬度范围必须是整数
File
zip
- Man
- E:\GF3Data\AtmophericDealy\CASR_atmo_ASTGTM2_N34E113_dem.zip;E:\GF3Data\AtmophericDealy\CASR_atmo_ASTGTM2_N34E114_dem.zip;E:\GF3Data\AtmophericDealy\CASR_atmo_ASTGTM2_N35E113_dem.zip;E:\GF3Data\AtmophericDealy\CASR_atmo_ASTGTM2_N35E114_dem.zip
- True
- False
- File
+ Man
+
+ G:\辅助数据\GF3Data\Deformation\CASR_df_ASTGTM2_N34E113_dem.zip;
+ G:\辅助数据\GF3Data\Deformation\CASR_df_ASTGTM2_N34E114_dem.zip;
+ G:\辅助数据\GF3Data\Deformation\CASR_df_ASTGTM2_N35E113_dem.zip;
+ G:\辅助数据\GF3Data\Deformation\CASR_df_ASTGTM2_N35E114_dem.zip
+ True
+ False
+ File
DEM
0
DEM
- box
+ box
经纬度包围盒
经纬度包围盒SNWE。例子:30.0;30.2;117.3;117.5
Value
string
- Man
- 34.60;34.67;113.05;113.18
- True
- True
- UploadInput
+ Man
+ 34.64;34.67;113.15;113.18
+ True
+ True
+ UploadInput
Aux
0
Aux
-
+
@@ -137,8 +144,8 @@
大气延迟校正产品反演
File
tar.gz
- Man
- E:\Result_GF3\AtmosphericDelay\Output\GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686-IADC.tar.gz
+ Man
+ D:\micro\WorkSpace\AtmosphericDelay\Output\GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686
diff --git a/atmosphericDelay-C-SAR/AtmosphericDelayMain.py b/atmosphericDelay-C-SAR/AtmosphericDelayMain.py
index a7ef87cd..59262c88 100644
--- a/atmosphericDelay-C-SAR/AtmosphericDelayMain.py
+++ b/atmosphericDelay-C-SAR/AtmosphericDelayMain.py
@@ -106,9 +106,10 @@ class AtmosphericMain:
self.__create_work_space()
self.__input_paras = self.__alg_xml_handler.get_input_paras() # get the data names, types and paths from the input folder
self.__processing_paras = self.__init_processing_paras(self.__input_paras, self.__workspace_preprocessed_path) # returns {file name: path}
+ SrcImagePath = self.__input_paras["SARS"]['ParaValue']
+ paths = SrcImagePath.split(';')
SrcImageName = os.path.split(self.input_paras["MasterSarData"])[1].split('.tar.gz')[0]
- result_name = SrcImageName + tar + ".tar.gz"
- self.__out_para = os.path.join(self.__workspace_path, EXE_NAME, 'Output', result_name)
+ self.__out_para = os.path.join(self.__workspace_path, EXE_NAME, 'Output', SrcImageName)
self.__alg_xml_handler.write_out_para("AtmosphericDelayProduct", self.__out_para) # write the output parameter
# determine whether the meteorological data is in tif or file format
@@ -162,7 +163,7 @@ class AtmosphericMain:
a_nc = 1
processing_paras.update({name: file_path})
else:
- para_value_list = out_path.split(";")
+ para_value_list = para['ParaValue'].split(";")
if len(para_value_list) == 1:
para_path = para['ParaValue']
if para_path != 'empty' and para_path != '':
@@ -171,7 +172,7 @@ class AtmosphericMain:
else:
for n, para_value_zip in zip(range(len(para_value_list)), para_value_list):
file_path = BlockProcess.unzip_file(para_value_zip, out_path)
- processing_paras.update({name+str(n): file_path})
+ processing_paras.update({name + str(n): file_path})
elif para['DataType'] == 'file':
if name in ["MasterNC", "AuxiliaryNC"]:
processing_paras.update({name: para_path})
@@ -224,19 +225,6 @@ class AtmosphericMain:
shutil.copy(file_path, os.path.join(dem_path, tif_name))
para_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
processing_paras.update({'dem': para_path})
- # # 解压DEM到指定文件夹
- # path = para['ParaValue']
- # import zipfile
- # zip_file = zipfile.ZipFile(path)
- # zip_list = zip_file.namelist() # 得到压缩包里所有文件
- # for f in zip_list:
- # zip_file.extract(f, self.__workspace_dem_path) # 循环解压文件到指定目录
- # if os.path.splitext(f)[1] == '.wgs84':
- # dem_name = f
- # processing_paras.update({'dem': os.path.join(self.__workspace_dem_path, f)})
- # zip_file.close()
- # self.verifyAndModifyWgsXml(self.__workspace_dem_path + '\\' + dem_name + '.xml',
- # self.__workspace_dem_path + '\\' + dem_name)
if name == 'Orbits':
if para['DataType'] == 'File':
processing_paras.update({'orbits': para['ParaValue']})
@@ -267,7 +255,7 @@ class AtmosphericMain:
para_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
processing_paras.update({'slc': para_path})
for pa in para_path_list:
- key_word = os.path.basename(pa).split('_')[7]
+ key_word = os.path.basename(pa).split('_')[6]
if key_word == self.mas_key_word:
self.input_paras.update({"MasterSarData": pa})
else:
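Both index changes in this file (and the later split('_')[6] edits elsewhere in the diff) key off the underscore-delimited GF3 scene name; token 6 is the acquisition date that is matched against the MainImg parameter. A small illustration, reusing a scene name from the XML above:

```python
name = "GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686"
tokens = name.split('_')
print(tokens[6])   # '20161129' - acquisition date, compared with MainImg
print(tokens[7])   # 'L1A'      - product level, the previous (wrong) index
```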
diff --git a/backScattering/BackScatteringMain.py b/backScattering/BackScatteringMain.py
index d3c187f2..95e3455e 100644
--- a/backScattering/BackScatteringMain.py
+++ b/backScattering/BackScatteringMain.py
@@ -333,7 +333,7 @@ class ScatteringMain:
dem_merged_path = DEMProcess.dem_merged(in_dem_path, meta_file_path,
out_dem_path) # generates TestDEM\mergedDEM_VRT.tif
- dem_path = self.cut_dem(dem_merged_path, meta_file_path)
+ # dem_path = self.cut_dem(dem_merged_path, meta_file_path)
in_slc_path = None
for slc_path in in_tif_paths:
@@ -343,7 +343,7 @@ class ScatteringMain:
break
# after the correction model is obtained
- Orthorectification.preCaldem_sar_rc(dem_path, in_slc_path, self.__workspace_preprocessing_path,
+ Orthorectification.preCaldem_sar_rc(dem_merged_path, in_slc_path, self.__workspace_preprocessing_path,
self.__workspace_processing_path.replace("\\", "\\\\")) # coarse screening of the coordinate range
logger.info('progress bar: 40%')
# clip_dem_reample_path=os.path.join(self.__workspace_preprocessing_path, "SAR_dem.tiff")
diff --git a/deformation-C-SAR/DeformationImageHandle.py b/deformation-C-SAR/DeformationImageHandle.py
index 900f31ac..60cbf8d0 100644
--- a/deformation-C-SAR/DeformationImageHandle.py
+++ b/deformation-C-SAR/DeformationImageHandle.py
@@ -392,8 +392,8 @@ class DemImageHandler:
min = np.nanmin(t_data)
max = np.nanmax(t_data)
t_data[np.isnan(t_data)] = max
- if (max - min) < 256:
- t_data = (t_data - min) / (max - min) * 255
+ # if (max - min) < 256:
+ t_data = (t_data - min) / (max - min) * 255
out_img = Image.fromarray(t_data)
out_img = out_img.resize((q_c, q_r)) # resample
out_img = out_img.convert("L") # convert to grayscale
diff --git a/dem-C-SAR/DemImageHandle.py b/dem-C-SAR/DemImageHandle.py
index 900f31ac..60cbf8d0 100644
--- a/dem-C-SAR/DemImageHandle.py
+++ b/dem-C-SAR/DemImageHandle.py
@@ -392,8 +392,8 @@ class DemImageHandler:
min = np.nanmin(t_data)
max = np.nanmax(t_data)
t_data[np.isnan(t_data)] = max
- if (max - min) < 256:
- t_data = (t_data - min) / (max - min) * 255
+ # if (max - min) < 256:
+ t_data = (t_data - min) / (max - min) * 255
out_img = Image.fromarray(t_data)
out_img = out_img.resize((q_c, q_r)) # resample
out_img = out_img.convert("L") # convert to grayscale
diff --git a/dem-C-SAR/DemMain.py b/dem-C-SAR/DemMain.py
index cea44ff9..4dcf1319 100644
--- a/dem-C-SAR/DemMain.py
+++ b/dem-C-SAR/DemMain.py
@@ -40,6 +40,8 @@ else:
DEBUG = False
file = fileHandle(DEBUG)
tar = r'-' + cf.get('tar')
+alks = cf.get('alks')
+rlks = cf.get('rlks')
productLevel = cf.get('productLevel')
LogHandler.init_log_handler('run_log\\' + EXE_NAME)
logger = logging.getLogger("mylog")
@@ -184,12 +186,15 @@ class DemMain:
if name == 'MainImg':
processing_paras.update({'mainimg': para['ParaValue']})
if name == 'box':
- datas = para['ParaValue'].split(';')
- if len(datas) != 4:
- msg = 'para: box is error!box:' + para['ParaValue']
- raise Exception(msg)
- box = datas[0] + ' ' + datas[1] + ' ' + datas[2] + ' ' + datas[3]
- processing_paras.update({'box': box})
+ if para['ParaValue'] == 'empty':
+ processing_paras.update({'box': 'empty'})
+ else:
+ datas = para['ParaValue'].split(';')
+ if len(datas) != 4:
+ msg = 'para: box is error!box:' + para['ParaValue']
+ raise Exception(msg)
+ box = datas[0] + ' ' + datas[1] + ' ' + datas[2] + ' ' + datas[3]
+ processing_paras.update({'box': box})
if name == 'AuxDir':
if para['DataType'] == 'File':
processing_paras.update({'AuxDir': para['ParaValue']})
@@ -406,6 +411,10 @@ class DemMain:
CreateProductXml(para_dict, model_path, meta_xml_path).create_standard_xml()
return meta_xml_path
+ def isce_run_steps(self, run_steps, target):
+ for i in range(0, len(run_steps)):
+ uwm_file = os.path.join(self.__workspace_isce_path, "run_files", run_steps[i])
+ shutil.move(uwm_file, target)
def process_handle(self,start):
# run isce 2.5 to generate the interferogram
@@ -467,17 +476,36 @@ class DemMain:
logger.info('slc to isce_data finish!')
logger.info('progress bar: 10%')
- # os.chdir(isce_exe_dir)
- cmd = "stackStripMap.exe -s {} -w {} -d {} -m {} -a {} -r {} -x {} -u 'snaphu' --nofocus".format(out_slc_dir, isce_work_space, dem_path, main_img, 3, 3, box)
- # cmd = "stackStripMap.exe -s {} -w {} -d {} -m {} -a {} -r {} -u 'snaphu' --nofocus".format(out_slc_dir, isce_work_space, dem_path, main_img, 3, 3)
- logger.info('stackStripMap_cmd:{}'.format(cmd))
- result = os.system(cmd)
- logger.info('cmd_result:{}'.format(result))
- logger.info('stackStripMap finish!')
+ if self.__in_processing_paras['box'] == 'empty':
+ box = ''
+ cmd = "stackStripMap.exe -s {} -w {} -d {} -m {} -a {} -r {} -u 'snaphu' --nofocus".format(out_slc_dir,
+ isce_work_space,
+ dem_path,
+ main_img, alks,
+ rlks)
+ logger.info('stackStripMap_cmd:{}'.format(cmd))
+ result = os.system(cmd)
+ logger.info('cmd_result:{}'.format(result))
+ logger.info('stackStripMap finish!')
+ run_files = os.path.join(self.__workspace_isce_path, 'run_files')
+ for file in list(glob.glob(os.path.join(run_files, '*.job'))):
+ os.remove(file)
+ run_steps = ["run_07_grid_baseline"]
+ self.isce_run_steps(run_steps, self.__workspace_isce_path)
+ else:
+ box = "'" + self.__in_processing_paras['box'] + "'"
+ cmd = "stackStripMap.exe -s {} -w {} -d {} -m {} -a {} -r {} -x {} -u 'snaphu' --nofocus".format(
+ out_slc_dir, isce_work_space, dem_path, main_img, alks, rlks, box)
+ logger.info('stackStripMap_cmd:{}'.format(cmd))
+ result = os.system(cmd)
+ logger.info('cmd_result:{}'.format(result))
+ logger.info('stackStripMap finish!')
- run_files = os.path.join(self.__workspace_isce_path, 'run_files')
- for file in list(glob.glob(os.path.join(run_files, '*.job'))):
- os.remove(file)
+ run_files = os.path.join(self.__workspace_isce_path, 'run_files')
+ for file in list(glob.glob(os.path.join(run_files, '*.job'))):
+ os.remove(file)
+ run_steps = ["run_08_grid_baseline"]
+ self.isce_run_steps(run_steps, self.__workspace_isce_path)
cmd = ['-e', isce_exe_dir, '-o', self.__workspace_isce_path]
logger.info('autorun_cmd:{}'.format(cmd))
@@ -534,7 +562,7 @@ if __name__ == '__main__':
start = datetime.datetime.now()
try:
if len(sys.argv) < 2:
- xml_path = r'Dem.xml'
+ xml_path = r'Dem_C_SAR_V3.xml'
else:
xml_path = sys.argv[1]
Main = DemMain(xml_path)
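A condensed, hypothetical sketch of the branching added above: the bounding box becomes optional (ParaValue 'empty' drops the -x option), and the looks now come from config.ini (alks/rlks) instead of the hard-coded 3:

```python
def build_stackstripmap_cmd(out_slc_dir, isce_work_space, dem_path, main_img,
                            alks, rlks, box):
    """Assemble the stackStripMap.exe command line; box == 'empty' omits -x."""
    base = ("stackStripMap.exe -s {} -w {} -d {} -m {} -a {} -r {}"
            .format(out_slc_dir, isce_work_space, dem_path, main_img, alks, rlks))
    if box == 'empty':
        return base + " -u 'snaphu' --nofocus"
    return base + " -x '{}' -u 'snaphu' --nofocus".format(box)
```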
diff --git a/dem-C-SAR/Dem_C_SAR_V3.xml b/dem-C-SAR/Dem_C_SAR_V3.xml
index d6acf1a5..f9c7c057 100644
--- a/dem-C-SAR/Dem_C_SAR_V3.xml
+++ b/dem-C-SAR/Dem_C_SAR_V3.xml
@@ -41,7 +41,7 @@
DEFAULT
DEFAULT
Man
- 20161129
+ 20230615
True
False
UploadInput
@@ -60,8 +60,8 @@
DEFAULT
Cal
- D:\micro\microproduct_depdence\GF3-Deformation\download\cls\GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686.tar.gz;
- D:\micro\microproduct_depdence\GF3-Deformation\download\cls\GF3_KAS_FSI_002034_E113.4_N34.7_20161228_L1A_HHHV_L10002077539.tar.gz
+ F:\MicroWorkspace\yuan-GF3-INSAR\GF3B_MYC_UFS_008208_E116.5_N44.2_20230615_L1A_DH_L10000199472.tar.gz;
+ F:\MicroWorkspace\yuan-GF3-INSAR\GF3B_MYC_UFS_009462_E116.3_N44.3_20230910_L1A_DH_L10000237326.tar.gz
True
False
File
@@ -72,14 +72,14 @@
box
经纬度包围盒
- 经纬度包围盒SNWE。例子:30.0;30.2;117.3;117.5 37;38.2;108.87;109.1
+ 经纬度包围盒SNWE。例子:37;38.2;108.87;109.1
value
string
DEFAULT
DEFAULT
DEFAULT
Man
- 34.60;34.67;113.05;113.18
+ 44.14;44.3;116.351;116.437
True
True
UploadInput
@@ -92,12 +92,12 @@
DEM高程数据路径
DEM高程数据数据。数据来源:30米 ASTGTM2, 数据格式:tif。备注:数据的经纬度范围必须是整数
File
- File
+ tif
DEFAULT
DEFAULT
DEFAULT
Cal
- D:\micro\microproduct_depdence\GF3-Deformation\dem
+ F:\MicroWorkspace\COPDEM\COPDEM_Int16\115E39N_COP30.tif
True
False
File
@@ -137,8 +137,7 @@
DEFAULT
DEFAULT
-9999
-
- D:\micro\WorkSpace\Dem\Output\GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686-DEM.tar.gz
+ D:\micro\WorkSpace\Dem\Output\GF3B_MYC_UFS_008208_E116.5_N44.2_20230615_L1A_DH_L10000199472-DEM.tar.gz
diff --git a/dem-C-SAR/ISCEApp/_internal/isce/components/isceobj/Sensor/GF3_SLC.py b/dem-C-SAR/ISCEApp/_internal/isce/components/isceobj/Sensor/GF3_SLC.py
index f36dd753..496c4bf8 100644
--- a/dem-C-SAR/ISCEApp/_internal/isce/components/isceobj/Sensor/GF3_SLC.py
+++ b/dem-C-SAR/ISCEApp/_internal/isce/components/isceobj/Sensor/GF3_SLC.py
@@ -1253,7 +1253,7 @@ class _prodInfo(GF3_SLCNamespace):
elif z.tag == 'WidthInMeters':
self.WidthInMeters = float(z.text)
if z.tag == 'productLevel':
- self.productLevel = int(z.text)
+ self.productLevel = str(z.text)
elif z.tag == 'productType':
self.productType = z.text
elif z.tag == 'productFormat':
@@ -1421,14 +1421,17 @@ class _imageInfo(GF3_SLCNamespace):
elif z.tag == 'QualifyValue':
QualifyValue = z
for value in QualifyValue:
- if value.tag == 'HH':
- self.QualifyValue[0] = float(value.text)
- elif value.tag == 'HV':
- self.QualifyValue[1] = float(value.text)
- elif value.tag == 'VH':
- self.QualifyValue[2] = float(value.text)
- elif value.tag == 'VV':
- self.QualifyValue[3] = float(value.text)
+ try:
+ if value.tag == 'HH':
+ self.QualifyValue[0] = float(value.text)
+ elif value.tag == 'HV':
+ self.QualifyValue[1] = float(value.text)
+ elif value.tag == 'VH':
+ self.QualifyValue[2] = float(value.text)
+ elif value.tag == 'VV':
+ self.QualifyValue[3] = float(value.text)
+ except Exception as e:
+ print(e)
def __str__(self):
retstr = "_ImageInfo:"+sep+tab
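The try/except added above tolerates QualifyValue entries whose text is empty or non-numeric. A hedged stand-alone version of the same tolerant parse, written against xml.etree for illustration only:

```python
import xml.etree.ElementTree as ET

POLS = ('HH', 'HV', 'VH', 'VV')

def parse_qualify_values(qualify_elem):
    """Return the four polarization QualifyValues; NaN where the text cannot be parsed."""
    values = [float('nan')] * 4
    for child in qualify_elem:
        if child.tag in POLS:
            try:
                values[POLS.index(child.tag)] = float(child.text)
            except (TypeError, ValueError):
                pass   # e.g. an empty or 'NULL' element in some GF3 deliveries
    return values

# Example:
elem = ET.fromstring("<QualifyValue><HH>21.3</HH><HV>NULL</HV></QualifyValue>")
print(parse_qualify_values(elem))   # [21.3, nan, nan, nan]
```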
diff --git a/dem-C-SAR/config.ini b/dem-C-SAR/config.ini
index a2f7bc2f..50aa9cee 100644
--- a/dem-C-SAR/config.ini
+++ b/dem-C-SAR/config.ini
@@ -5,7 +5,8 @@
exe_name = Dem
# In debug mode the temporary workspace is not deleted. True: enable debugging, False: disable debugging
debug = True
-
+alks = 10
+rlks = 10
# algorithm information
tar = DEM
productLevel = 5
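The new alks/rlks keys are read at import time in DemMain.py via cf.get('alks') / cf.get('rlks'). A minimal configparser-based sketch of that lookup, assuming the repo's config helper wraps configparser; the section name used here is only illustrative:

```python
import configparser

def read_looks(ini_path='config.ini', section='internal'):
    """Return (alks, rlks) as strings; they are substituted directly into the ISCE command line."""
    cf = configparser.ConfigParser()
    cf.read(ini_path, encoding='utf-8')
    return cf.get(section, 'alks'), cf.get(section, 'rlks')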
diff --git a/landcover_c_sar/LandCoverMain.py b/landcover_c_sar/LandCoverMain.py
index edcb37d3..dad9311f 100644
--- a/landcover_c_sar/LandCoverMain.py
+++ b/landcover_c_sar/LandCoverMain.py
@@ -587,7 +587,7 @@ class LandCoverMain:
logging.info("feature_tif_paths:%s",feature_tif_paths)
# geocode all the features
- feature_geo = self.features_geo(feature_tif_paths)
+ feature_geo = self.features_geo(self.__feature_tif_dir)
# normalize the newly added features
# for name in self.__feature_name_list:
# proj, geo, arr = self.imageHandler.read_img(self.__preprocessed_paras[name])
@@ -709,7 +709,7 @@ if __name__ == '__main__':
start = datetime.datetime.now()
try:
if len(sys.argv) < 2:
- xml_path = 'LandCover.xml'
+ xml_path = 'LandCover_C_SAR_V3.xml'
else:
xml_path = sys.argv[1]
main_handler = LandCoverMain(xml_path)
diff --git a/landcover_c_sar/LandCover_C_SAR_V3.xml b/landcover_c_sar/LandCover_C_SAR_V3.xml
index 691118d5..0a9bf5bd 100644
--- a/landcover_c_sar/LandCover_C_SAR_V3.xml
+++ b/landcover_c_sar/LandCover_C_SAR_V3.xml
@@ -42,7 +42,7 @@
DEFAULT
Cal
- E:\MicroWorkspace\GF3A_nanjing\input-ortho\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho.tar.gz
+ F:\MicroWorkspace\GF3A_nanjing\input-ortho\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho.tar.gz
True
False
File
@@ -55,12 +55,12 @@
标记数据
标记的样本数据
File
- csv
+ zip
DEFAULT
DEFAULT
DEFAULT
Cal
- E:\MicroWorkspace\GF3A_nanjing\input-ortho\LandCoverLable_geo.csv
+ F:\al_zhongji\C-SAR-data\landcover\CSAR_land_LandCoverLable.zip
True
True
UploadTable
@@ -90,17 +90,16 @@
FeatureCombination
极化特征组合
可选极化特征组合一、共14种特征(编号依次为0-13)
- Freeman:表面散射p_s(0)、偶次散射p_d(1)、体散射p_v(2);
- Touzi:散射角α_s(3)、散射相位ϕ_α(4)、目标散射对称度τ(5)、相对能量λ_i(6);
- Yamaguchi:表面散射f_s(7)、二次散射f_d(8)、体散射f_v(9)、螺旋体散射f_h(10);
- Cloude-Pottier:分解散射熵H(11)、反熵A(12)、平均散射角α(13)
+ Freeman:表面散射p_s(0)、偶次散射p_d(1)、体散射p_v(2);
+ Yamaguchi:表面散射f_s(3)、二次散射f_d(4)、体散射f_v(5)、螺旋体散射f_h(6);
+ Cloude-Pottier:分解散射熵H(7)、反熵A(8)、平均散射角α(9)
Value
string
DEFAULT
DEFAULT
DEFAULT
Man
- 0,1,2,7,8,9,10
+ 0,1,2
True
True
UploadInput
@@ -121,8 +120,7 @@
DEFAULT
DEFAULT
Man
-
- D:\micro\WorkSpace\LandCover\Output\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho-LANDClASS.tar.gz
+ D:\micro\WorkSpace\LandCover\Output\GF3_SAY_QPSI_011444_E118.9_N31.4_20181012_L1A_AHV_L10003515422-ortho-LANDCLASS.tar.gz
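This file and VegetationPhenology.xml (further below) both renumber the polarimetric features after the Touzi set was dropped. For reference, a hedged lookup of the new 0-9 numbering, translated from the description above:

```python
# New FeatureCombination numbering (Touzi features removed).
FEATURE_INDEX = {
    0: "Freeman surface scattering p_s",
    1: "Freeman double-bounce scattering p_d",
    2: "Freeman volume scattering p_v",
    3: "Yamaguchi surface scattering f_s",
    4: "Yamaguchi double-bounce scattering f_d",
    5: "Yamaguchi volume scattering f_v",
    6: "Yamaguchi helix scattering f_h",
    7: "Cloude-Pottier entropy H",
    8: "Cloude-Pottier anisotropy A",
    9: "Cloude-Pottier mean scattering angle alpha",
}

def parse_feature_combination(para_value):
    """Map the XML ParaValue string (e.g. '0,1,2') to feature names."""
    return [FEATURE_INDEX[int(i)] for i in para_value.split(',') if i.strip()]

print(parse_feature_combination("0,1,2"))   # the value set in this file
```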
diff --git a/soilMoistureTop/SoilMoisture.xml b/soilMoistureTop/SoilMoisture.xml
index 71a16f6f..f5a04c92 100644
--- a/soilMoistureTop/SoilMoisture.xml
+++ b/soilMoistureTop/SoilMoisture.xml
@@ -36,16 +36,16 @@
File
tar.gz
Man
- F:\202306hb\sar_img\GF3B_SYC_QPSI_008316_E116.1_N43.3_20230622_L1A_AHV_L10000202892-cal.tar.gz
+ F:\Tian-GF3-Wenchang\GF3_SYC_QPSI_040488_E110.7_N19.9_20240418_L1A_AHV_L10006923783-cal.tar.gz
Covering
地表覆盖类型数据
经过地理定标(WGS84)的地表覆盖类型数据
File
- zip
+ tif
Man
- E:\辅助数据\GF3Data\soilMoisture\CSAR_solim_50T_20220101-20230101.zip
+ F:\Tian-GF3-Wenchang\landCover.tif
CoveringIDs
@@ -64,9 +64,9 @@
NDVI数据
经过地理定标(WGS84)的NDVI数据
File
- zip
+ tif
Man
- E:\辅助数据\GF3Data\soilMoisture\CSAR_solim_S2_202306_NDVI.zip
+ F:\Tian-GF3-Wenchang\NDVI\NDVI_20240417_WGS841.tif
NDVIScope
@@ -98,9 +98,9 @@
NDWI数据
经过地理定标(WGS84)的NDWI数据
File
- zip
+ tif
Man
- E:\辅助数据\GF3Data\soilMoisture\CSAR_solim_S2_202306_NDWI.zip
+ F:\Tian-GF3-Wenchang\NDWI\NDWI_20240417_WGS841.tif
e1
@@ -109,7 +109,7 @@
Value
float
Man
- -22.482554048434324
+ 5.308639240349595
100
-100
DEFAULT
@@ -121,7 +121,7 @@
Value
float
Man
- -10.72946251632336
+ -3.546779815649669
100
-100
DEFAULT
@@ -133,7 +133,7 @@
Value
float
Man
- -0.08238130673792357
+ 92.54550090363222
9999
-9999
DEFAULT
@@ -145,7 +145,7 @@
Value
float
Man
- 1.0194495140476119
+ -0.016379035637768415
9999
-9999
DEFAULT
@@ -157,7 +157,7 @@
Value
float
Man
- 6.107713980885245
+ -67.4408236678189
9999
-9999
DEFAULT
@@ -169,7 +169,7 @@
Value
float
Man
- -4.496951628949385
+ 0.054641574960551206
9999
-9999
DEFAULT
@@ -183,7 +183,7 @@
File
tar.gz
Man
- D:\micro\WorkSpace\SoilMoisture\Output\GF3B_SYC_QPSI_008316_E116.1_N43.3_20230622_L1A_AHV_L10000202892-cal-SMC.tar.gz
+ D:\micro\WorkSpace\SoilMoisture\Output\GF3_SYC_QPSI_040488_E110.7_N19.9_20240418_L1A_AHV_L10006923783-cal-SMC.tar.gz
DEFAULT
DEFAULT
DEFAULT
diff --git a/tool/algorithm/image/ImageHandle.py b/tool/algorithm/image/ImageHandle.py
index 58ebac57..afcef790 100644
--- a/tool/algorithm/image/ImageHandle.py
+++ b/tool/algorithm/image/ImageHandle.py
@@ -645,9 +645,8 @@ class ImageHandler:
# grayscale image
min = np.percentile(t_data, 2) # np.nanmin(t_data)
max = np.percentile(t_data, 98) # np.nanmax(t_data)
- t_data[np.isnan(t_data)] = max
- if (max - min) < 256:
- t_data = (t_data - min) / (max - min) * 255
+ # if (max - min) < 256:
+ t_data = (t_data - min) / (max - min) * 255
out_img = Image.fromarray(t_data)
out_img = out_img.resize((q_c, q_r)) # resample
out_img = out_img.convert("L") # convert to grayscale
@@ -763,8 +762,8 @@ class ImageHandler:
if __name__ == '__main__':
cols = 7086
rows = 8064
- inc_xml = r'D:\micro\WorkSpace\SurfaceRoughness\Temporary\preprocessing\GF3C_KSC_QPSI_008440_E86.0_N44.7_20231113_L1A_AHV_L10000215825-ortho\GF3C_KSC_QPSI_008440_E86.0_N44.7_20231113_L1A_AHV_L10000215825.incidence.xml'
- ImageHandler.get_inc_angle(inc_xml, rows, cols)
+ inc_xml = r"D:\micro\WorkSpace\Dem\Temporary\processing\product\GF3_SAY_FSI_001614_E113.2_N34.5_20161129_L1A_HHHV_L10002015686-DEM.tiff"
+ # ImageHandler().write_quick_view(inc_xml)
# fn = r'E:\202306hb\result\GF3B_SYC_QPSI_008316_E116.1_N43.3_20230622_L1A_AHV_L10000202892-cal-SMC.tif'
# out = r'E:\202306hb\result\soil.tif'
# #
@@ -780,3 +779,7 @@ if __name__ == '__main__':
# s = ImageHandler().band_merge(path, path2, path3)
# print(s)
# pass
+ fn = r"D:\BaiduNetdiskDownload\植被物候\chen_features_warp.tif"
+ outP = r'D:\BaiduNetdiskDownload\植被物候\chen_features_warp_LWZ.tif'
+ im_proj, im_geotrans, im_arr = ImageHandler.read_img(fn)
+ ImageHandler.write_img(outP, im_proj, im_geotrans, im_arr)
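The quick-view hunks here and in DemImageHandle.py / DeformationImageHandle.py switch to an unconditional 2-98 percentile stretch. A hedged stand-alone version of that stretch; the NaN fill and the constant-band guard are additions of this sketch, since the removed lines no longer handle them:

```python
import numpy as np
from PIL import Image

def quick_view_band(t_data, out_size):
    """Stretch a float band to 8 bits between its 2nd and 98th percentiles."""
    lo = np.nanpercentile(t_data, 2)
    hi = np.nanpercentile(t_data, 98)
    t_data = np.nan_to_num(np.clip(t_data, lo, hi), nan=hi)   # assumption: fill NaNs with the upper bound
    if hi > lo:                                                # avoid division by zero on constant bands
        t_data = (t_data - lo) / (hi - lo) * 255
    img = Image.fromarray(t_data.astype(np.float32))
    img = img.resize(out_size)    # resample to the quick-view size
    return img.convert("L")       # 8-bit grayscale
```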
diff --git a/tool/algorithm/ml/machineLearning.py b/tool/algorithm/ml/machineLearning.py
index 9ff25882..d9d04719 100644
--- a/tool/algorithm/ml/machineLearning.py
+++ b/tool/algorithm/ml/machineLearning.py
@@ -218,9 +218,10 @@ class MachineLeaning:
# start multiprocess processing
bp = BlockProcess()
block_size = bp.get_block_size(rows, cols)
+ name_d = out_tif_name.split('_')[6] + '_VTH'
block_features_dir = X_test_list
- bp_cover_dir = os.path.join(workspace_processing_path, out_tif_name,
+ bp_cover_dir = os.path.join(workspace_processing_path, name_d,
'pre_result\\') # workspace_processing_path + out_tif_name + '\\'
file.creat_dirs([bp_cover_dir])
@@ -242,7 +243,7 @@ class MachineLeaning:
suffix = '_' + name.split('_')[-4] + "_" + name.split('_')[-3] + "_" + name.split('_')[-2] + "_" + \
name.split('_')[-1]
- img_path = os.path.join(bp_cover_dir, out_tif_name + suffix) # bp_cover_dir + out_tif_name + suffix
+ img_path = os.path.join(bp_cover_dir, name_d + suffix) # bp_cover_dir + out_tif_name + suffix
row_begin = int(name.split('_')[-4])
col_begin = int(name.split('_')[-2])
pool.apply_async(ml.predict_blok, (clf, X_test, block_size, block_size, img_path, row_begin, col_begin, len(block_features_dir), n))
@@ -259,7 +260,7 @@ class MachineLeaning:
# add georeferencing information
cover_path = os.path.join(workspace_processing_path,
- out_tif_name + ".tif") # workspace_processing_path + out_tif_name + ".tif"
+ name_d + ".tif") # workspace_processing_path + out_tif_name + ".tif"
# bp.assign_spatial_reference_byfile(self.__ref_img_path, cover_path)
return cover_path
@@ -311,6 +312,46 @@ class MachineLeaning:
logger.info("gene_train_set success!")
return X_train, Y_train
+ @staticmethod
+ def gene_train_set_deLandcover(train_data_dic, feature_tif_dir, land_cover_tif, coverId):
+ """
+ Build the training set, excluding pixels whose land-cover class is in coverId
+ :param train_data_dic : training data read from the csv
+ :param feature_tif_dir : directory of the feature images
+ :param land_cover_tif : geocoded land-cover raster used for masking
+ :param coverId : land-cover class ids whose pixels are zeroed out
+ :return X_train, Y_train : training data
+ """
+ in_tif_paths = list(glob.glob(os.path.join(feature_tif_dir, '*.tif')))
+ land_arr = ImageHandler.get_band_array(land_cover_tif, 1)
+ dim = len(in_tif_paths)
+ X_train = np.empty(shape=(0, dim))
+ Y_train = np.empty(shape=(0, 1))
+
+ ids = train_data_dic['ids']
+ positions = train_data_dic['positions']
+ for id, points in zip(ids, positions):
+ # for data in train_data_list:
+ if points == []:
+ raise Exception('data is empty!')
+ row, col = zip(*points)
+ l = len(points)
+ X = np.empty(shape=(l, dim))
+
+ for n, tif_path in zip(range(dim), in_tif_paths):
+ feature_array = ImageHandler.get_data(tif_path)
+ feature_array[np.isnan(feature_array)] = 0 # fill invalid values with 0
+ for cover_id in coverId: # renamed from 'id' to avoid shadowing the class label used for Y below
+ feature_array[np.where(land_arr == cover_id)] = 0
+ x = feature_array[row, col].T
+ X[:, n] = x
+
+ Y = np.full((l, 1), id)
+ X_train = np.vstack((X_train, X))
+ Y_train = np.vstack((Y_train, Y))
+ Y_train = Y_train.T[0, :]
+
+ logger.info("gene_train_set success!")
+ return X_train, Y_train
+
@staticmethod
def standardization(data, num=1):
# normalize the matrix to [0, 1]
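A hedged usage sketch for the new gene_train_set_deLandcover (paths and class ids below are placeholders), mirroring how VegetationPhenologyMain.py calls it further down in this diff:

```python
# Placeholder inputs; real values come from the processing/preprocessing dictionaries.
train_data_dic = {'ids': [1, 2],
                  'positions': [[(10, 12), (11, 15)], [(40, 41), (42, 43)]]}
feature_tif_dir = r"D:\work\features_geo"      # folder of geocoded feature *.tif files
land_cover_tif = r"D:\work\covering_geo.tif"   # geocoded land-cover raster
cover_ids = [10, 60]                           # classes (e.g. water, built-up) to exclude

X_train, Y_train = MachineLeaning.gene_train_set_deLandcover(
    train_data_dic, feature_tif_dir, land_cover_tif, cover_ids)
```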
diff --git a/tool/algorithm/polsarpro/createfeature.py b/tool/algorithm/polsarpro/createfeature.py
index b252a083..4aaaf104 100644
--- a/tool/algorithm/polsarpro/createfeature.py
+++ b/tool/algorithm/polsarpro/createfeature.py
@@ -29,18 +29,18 @@ class CreateFeature:
atp = AHVToPolsarpro()
atp = AHVToPolsarpro(hh_hv_vh_vv_list)
- lee_filter_path = os.path.join(workspace_processing_path, name, 'lee_filter\\') # workspace_processing_path + name + '\\lee_filter\\'
+ lee_filter_path = os.path.join(workspace_processing_path, name.split('_')[6], 'lee_filter\\') # workspace_processing_path + name + '\\lee_filter\\'
if self._debug == False:
- t3_path = os.path.join(workspace_processing_path, name, 'psp_t3\\') # workspace_processing_path + name + '\\psp_t3\\'
+ t3_path = os.path.join(workspace_processing_path, name.split('_')[6], 'psp_t3\\') # workspace_processing_path + name + '\\psp_t3\\'
# atp.ahv_to_polsarpro_t3(t3_path, tif_path)
polarization = ['HH', 'HV', 'VH', 'VV']
- if os.path.exists(workspace_preprocessing_path + name + '\\'):
- meta_xml_paths = list(glob.glob(os.path.join(workspace_preprocessing_path + name, '*.meta.xml')))
+ if os.path.exists(workspace_preprocessing_path + name.split('_')[6] + '\\'):
+ meta_xml_paths = list(glob.glob(os.path.join(workspace_preprocessing_path + name.split('_')[6], '*.meta.xml')))
meta_dic = InitPara.get_meta_dic_new(meta_xml_paths, name)
calibration = Calibration.get_Calibration_coefficient(meta_dic['Origin_META'], polarization)
- tif_path = atp.calibration(calibration, workspace_preprocessing_path, name)
+ tif_path = atp.calibration(calibration, workspace_preprocessing_path)
atp.ahv_to_polsarpro_t3_veg(t3_path, tif_path)
# Lee filtering
@@ -64,7 +64,7 @@ class CreateFeature:
freemanOutDir = os.path.join(workspace_processing_path, name + '\\freeman\\')
if self._debug == False:
freemDecom = PspFreemanDecomposition(exeDir, t3_path, freemanOutDir)
- flag = freemDecom.api_freeman_decomposition_T3(0, 0, rows, cols)
+ flag = freemDecom.api_freeman_decomposition_T3(0, 0, rows, cols, 7)
if not flag:
logger.error('FreemanDecomposition err')
return False, None
@@ -88,7 +88,7 @@ class CreateFeature:
yamaguchiOutDir = os.path.join(workspace_processing_path, name + '\\yamaguchi\\')
if self._debug == False:
yamaguchiDecom = PspYamaguchiDecomposition(exeDir, t3_path, yamaguchiOutDir)
- flag = yamaguchiDecom.api_yamaguchi_4components_decomposition_T3(0, 0, rows, cols)
+ flag = yamaguchiDecom.api_yamaguchi_4components_decomposition_T3(0, 0, rows, cols, 7)
if not flag:
logger.error('YamaguchiDecomposition err')
return False, None
@@ -102,7 +102,7 @@ class CreateFeature:
cloudeDecom = PspCloudePottierDecomposition(
exeDir, t3_path, cloudeOutDir)
flag = cloudeDecom.api_h_a_alpha_decomposition_T3(
- 0, 0, rows, cols)
+ 0, 0, rows, cols, 7)
if not flag:
logger.error('CloudePottierDecomposition err')
return False, None
@@ -152,14 +152,14 @@ class CreateFeature:
hh_hv_vh_vv_dic.update({'VH': hh_hv_vh_vv_list[2]})
hh_hv_vh_vv_dic.update({'VV': hh_hv_vh_vv_list[3]})
t3_path = os.path.join(workspace_processing_path, name, "lee_filter") # workspace_processing_path + name + "\\lee_filter"
- feature_tif_dir = os.path.join(workspace_processing_path, name, 'features') # workspace_processing_path + name + "\\features"
+ feature_tif_dir = os.path.join(workspace_processing_path, name.split('_')[6], 'features') # workspace_processing_path + name + "\\features"
cfeature = CreateFeature(debug, exe_dir)
# cfeature.creat_h_a_alpha_features(t3_path, feature_tif_dir)
t3_path = cfeature.ahv_to_t3(workspace_processing_path, workspace_preprocessing_path, hh_hv_vh_vv_list, name, FILTER_SIZE)
- flag, outFolderDic = cfeature.decompose(workspace_processing_path, name, t3_path, rows, cols, hh_hv_vh_vv_dic, FeatureInput) # , 'Touzi'
+ flag, outFolderDic = cfeature.decompose(workspace_processing_path, name.split('_')[6], t3_path, rows, cols, hh_hv_vh_vv_dic, FeatureInput) # , 'Touzi'
cfeature.cereat_features_dic(outFolderDic, feature_tif_dir)
return feature_tif_dir
diff --git a/tool/algorithm/xml/AlgXmlHandle.py b/tool/algorithm/xml/AlgXmlHandle.py
index 936d2e3b..b752cfe9 100644
--- a/tool/algorithm/xml/AlgXmlHandle.py
+++ b/tool/algorithm/xml/AlgXmlHandle.py
@@ -713,14 +713,15 @@ class InitPara:
def get_mult_tar_gz_inf(self,tar_gz_path, workspace_preprocessing_path):
para_dic = {}
name = os.path.split(tar_gz_path)[1].rstrip('.tar.gz')
+ name_d = name.split('_')[6]
para_dic.update({'name': name})
- file_dir = os.path.join(workspace_preprocessing_path, name + '\\')
+ file_dir = os.path.join(workspace_preprocessing_path, name_d + '\\')
fileHandle().de_targz(tar_gz_path, file_dir)
# metadata file dictionary
- para_dic.update(InitPara.get_meta_dic_VP(InitPara.get_meta_paths(file_dir, name), name))
+ para_dic.update(InitPara.get_meta_dic_VP(InitPara.get_meta_paths(file_dir, name_d), name))
# tif path dictionary
- pol_dic = InitPara.get_polarization_mode(InitPara.get_tif_paths(file_dir, name))
+ pol_dic = InitPara.get_polarization_mode(InitPara.get_tif_paths(file_dir, name_d))
parameter_path = os.path.join(file_dir, "orth_para.txt")
para_dic.update({name + "paraMeter": parameter_path})
for key, in_tif_path in pol_dic.items():
diff --git a/vegetationPhenology/VegetationPhenology.xml b/vegetationPhenology/VegetationPhenology.xml
index 4560c84e..de03e454 100644
--- a/vegetationPhenology/VegetationPhenology.xml
+++ b/vegetationPhenology/VegetationPhenology.xml
@@ -107,14 +107,14 @@
FeatureCombination
极化特征组合
- 可选极化特征组合一、共14种特征(编号依次为0-13)
+ 可选极化特征组合一、共10种特征(编号依次为0-9)
Freeman:表面散射p_s(0)、偶次散射p_d(1)、体散射p_v(2);
- Yamaguchi:表面散射f_s(7)、二次散射f_d(8)、体散射f_v(9)、螺旋体散射f_h(10);
- Cloude-Pottier:分解散射熵H(11)、反熵A(12)、平均散射角α(13)
+ Yamaguchi:表面散射f_s(3)、二次散射f_d(4)、体散射f_v(5)、螺旋体散射f_h(6);
+ Cloude-Pottier:分解散射熵H(7)、反熵A(8)、平均散射角α(9)
Value
string
Man
- 0,1,2,7,8,9,10
+ 0,1,2,3,4,5,6,7,8,9
True
True
UploadInput
diff --git a/vegetationPhenology/VegetationPhenologyAuxData.py b/vegetationPhenology/VegetationPhenologyAuxData.py
index 832d2a37..f381b46a 100644
--- a/vegetationPhenology/VegetationPhenologyAuxData.py
+++ b/vegetationPhenology/VegetationPhenologyAuxData.py
@@ -318,22 +318,23 @@ class PhenoloyMeasCsv_geo:
if train_data[1] == type_id:
train_data[3] = train_data[3] + self.__render(poly)
if train_data[3] == [] :
- raise Exception('there are empty data!', train_data)
-
- if len(train_data_list) <= 1:
- raise Exception('there is only one label type!', train_data_list)
-
- num_list = []
- for train_data in train_data_list:
- if not len(train_data[3]) == 0:
- num_list.append(len(train_data[3]))
- max_num = np.min(num_list)
- for train_data in train_data_list:
- logger.info(str(train_data[0]) + "," + str(train_data[2]) +"," + "num:" + str(len(train_data[3])))
- # max_num = self.__max_tran__num_per_class
- logger.info("max number =" + str(max_num) +", random select"+str(max_num)+" point as train data!")
- if(len(train_data[3]) > max_num):
- train_data[3] = random.sample(train_data[3], max_num)
+ pass
+ # raise Exception('there are empty data!', train_data)
+ #
+ # if len(train_data_list) <= 1:
+ # raise Exception('there is only one label type!', train_data_list)
+ #
+ # num_list = []
+ # for train_data in train_data_list:
+ # if not len(train_data[3]) == 0:
+ # num_list.append(len(train_data[3]))
+ # max_num = np.min(num_list)
+ # for train_data in train_data_list:
+ # logger.info(str(train_data[0]) + "," + str(train_data[2]) +"," + "num:" + str(len(train_data[3])))
+ # # max_num = self.__max_tran__num_per_class
+ # logger.info("max number =" + str(max_num) +", random select"+str(max_num)+" point as train data!")
+ # if(len(train_data[3]) > max_num):
+ # train_data[3] = random.sample(train_data[3], max_num)
return train_data_list
diff --git a/vegetationPhenology/VegetationPhenologyMain.py b/vegetationPhenology/VegetationPhenologyMain.py
index cc971394..348919b3 100644
--- a/vegetationPhenology/VegetationPhenologyMain.py
+++ b/vegetationPhenology/VegetationPhenologyMain.py
@@ -101,8 +101,8 @@ class PhenologyMain:
self.__processing_paras.update(InitPara(DEBUG).get_mult_tar_gz_infs(self.__processing_paras, self.__workspace_preprocessing_path))
SrcImagePath = self.__input_paras["AHVS"]['ParaValue']
paths = SrcImagePath.split(';')
- SrcImageName = os.path.split(paths[0])[1].split('.tar.gz')[0]
- result_name = SrcImageName + tar + ".tar.gz"
+ self.SrcImageName = os.path.split(paths[0])[1].split('.tar.gz')[0]
+ result_name = self.SrcImageName + tar + ".tar.gz"
self.__out_para = os.path.join(self.__workspace_path, EXE_NAME, 'Output', result_name)
self.__alg_xml_handler.write_out_para("VegetationPhenologyProduct", self.__out_para) # write the output parameter
logger.info('check_source success!')
@@ -147,7 +147,7 @@ class PhenologyMain:
l1a_height = ImageHandler.get_img_height(self.__processing_paras[hh_key])
tr = TransImgL1A(ori_sim_path, scopes_roi, l1a_height, l1a_width)
for k in key_list:
- out_path = os.path.join(self.__workspace_preprocessed_path, k + "_preprocessed.tif")
+ out_path = os.path.join(self.__workspace_preprocessed_path, k.split('_')[6] + '_' + k.split('_')[10] + "_preprocessed.tif")
tr.cut_L1A(self.__processing_paras[k], out_path)
self.__preprocessed_paras.update({k: out_path})
self._name_tr_dic.update({name: tr})
@@ -173,8 +173,9 @@ class PhenologyMain:
scopes_roi = p.cal_intersect_shp(intersect_shp_path, para_names_geo, self.__processing_paras, scopes)
cutted_img_paths = p.cut_imgs_VP(self.__workspace_preprocessing_path, para_names_geo, self.__processing_paras,
intersect_shp_path, name)
+ name_d = name.split('_')[6]
self.__preprocessed_paras.update({name + '_sim_ori': cutted_img_paths.get(name + '_sim_ori')})
- self.__preprocessed_paras.update({name + '_Covering': cutted_img_paths.get('Covering')})
+ self.__preprocessed_paras.update({name_d + '_Covering': cutted_img_paths.get('Covering')})
self.preprocess_single_tar(name, scopes_roi)
@@ -277,7 +278,7 @@ class PhenologyMain:
sim_ori_path = self.__preprocessed_paras[sim_ori_key]
hh_path = self.__preprocessed_paras[name + "_HH"]
- hh_geo_path = os.path.join(self.__workspace_processing_path, name.split('-')[0] + '_geo.tif')
+ hh_geo_path = os.path.join(self.__workspace_processing_path, os.path.splitext(os.path.basename(hh_path))[0] + '_geo.tif')
paramter = self.__processing_paras[name + "paraMeter"]
self.calInterpolation_bil_Wgs84_rc_sar_sigma(paramter, sim_ori_path, hh_path, hh_geo_path)
@@ -290,7 +291,7 @@ class PhenologyMain:
logger.info("read phenology Measure.csv success!")
# add the quad-polarization backscatter coefficients to the feature maps
- feature_tif_dir = os.path.join(self.__workspace_processing_path, name, 'features')
+ feature_tif_dir = os.path.join(self.__workspace_processing_path, name.split('_')[6], 'features')
if not os.path.exists(feature_tif_dir):
os.makedirs(feature_tif_dir)
origin_xml = self.__processing_paras[name + "_Origin_META"]
@@ -319,7 +320,7 @@ class PhenologyMain:
rows = self.imageHandler.get_img_height(hh_hv_vh_vv_list[0])
featureInput = self.__getInputFeatures()
- feature_dir = CreateFeature.decompose_single_tar(hh_hv_vh_vv_list, self.__workspace_processing_path, self.__workspace_preprocessing_path, name, self._env_str, rows, cols, FILTER_SIZE=3, debug=DEBUG, FeatureInput=featureInput)
+ feature_dir = CreateFeature.decompose_single_tar(hh_hv_vh_vv_list, self.__workspace_processing_path, self.__workspace_preprocessing_path, name, self._env_str, rows, cols, FILTER_SIZE=7, debug=DEBUG, FeatureInput=featureInput)
feature_geo_dir = self.features_geo(feature_dir, paramter, sim_ori_path, name)
# # get the information for extracting training-set features
@@ -350,9 +351,9 @@ class PhenologyMain:
return feature_geo_dir, train_data_dic
def features_geo(self, features_path, paraMeter, sim_ori, sar_name):
- dir = os.path.join(self.__workspace_processing_path, sar_name, 'features_geo')
+ dir = os.path.join(self.__workspace_processing_path, sar_name.split('_')[6], 'features_geo')
if not os.path.exists(dir):
- os.mkdir(dir)
+ os.makedirs(dir)
in_tif_paths = list(glob.glob(os.path.join(features_path, '*.tif')))
processes_num = min([len(in_tif_paths), multiprocessing_num, multiprocessing.cpu_count() - 1])
pool = multiprocessing.Pool(processes=processes_num)
@@ -384,7 +385,8 @@ class PhenologyMain:
feature_dir, train_data_dic = self.create_feature_single_tar(name)
# build the training set
# X_train_part, Y_train_part, optimal_feature = ml.gene_optimal_train_set(train_data_dic, feature_dir, 0.08, 0.7)
- X_train_part, Y_train_part = ml.gene_train_set(train_data_dic, feature_dir)
+
+ X_train_part, Y_train_part = ml.gene_train_set_deLandcover(train_data_dic, feature_dir, self.__preprocessed_paras[name.split('_')[6] + '_Covering'], self.__processing_paras['CoveringIDs'])
name_list = ml.get_name_list(feature_dir)
# build the test set
@@ -442,8 +444,8 @@ class PhenologyMain:
para_names = ['Covering']
mask_dir = os.path.join(self.__workspace_processing_path, name + '\\')
bare_land_mask_path = roi().roi_process_VP(para_names, mask_dir,
- self.__processing_paras, self.__preprocessed_paras, name)
- product_path_pro = os.path.join(self.__product_dic, os.path.basename(product_path).split('.tif')[0] + tar + '.tif')
+ self.__processing_paras, self.__preprocessed_paras, name.split('_')[6])
+ product_path_pro = os.path.join(self.__product_dic, name + tar + '.tif')
# get the ROI area of the image
roi.cal_roi(product_path_pro, cover_geo_path, bare_land_mask_path, background_value=0)
self.imageHandler.write_quick_view(product_path_pro, color_img=True)
@@ -538,8 +540,9 @@ class PhenologyMain:
def resampleImgs(self, name, refer_img_path):
cover_rampling_path = os.path.join(self.__workspace_processing_path, name + "_cover.tif")
- pp.resampling_by_scale(self.__preprocessed_paras[name + "_Covering"], cover_rampling_path, refer_img_path)
- self.__preprocessed_paras[name + "_Covering"] = cover_rampling_path
+ name_d = name.split('_')[6]
+ pp.resampling_by_scale(self.__preprocessed_paras[name_d + "_Covering"], cover_rampling_path, refer_img_path)
+ self.__preprocessed_paras[name_d + "_Covering"] = cover_rampling_path
def create_roi(self, img_path):
"""
@@ -588,7 +591,7 @@ class PhenologyMain:
SrcImageName = os.path.basename(product_path).split('.tif')[0]
model_path = "./product.xml"
meta_xml_path = os.path.join(self.__product_dic, SrcImageName + ".meta.xml")
- key = os.path.basename(product_path).split('-VP.tif')[0] + '_Origin_META'
+ key = self.SrcImageName + '_Origin_META'
para_dict = CreateMetaDict(image_path, self.__processing_paras[key], self.__workspace_processing_path,
out_path1, out_path2).calu_nature()
para_dict.update({"imageinfo_ProductName": "植被物候"})
diff --git a/vegetationPhenology/config.ini b/vegetationPhenology/config.ini
index bf007472..aa6c4494 100644
--- a/vegetationPhenology/config.ini
+++ b/vegetationPhenology/config.ini
@@ -11,6 +11,6 @@ exe_name = VegetationPhenology
debug = False
######2 - Algorithm processing parameters######
# Image filter window size. Range: odd numbers greater than 1, default: 3
-filter_size = 3
+filter_size = 7
# Maximum number of training samples per class
max_tran__num_per_class =100000
\ No newline at end of file
diff --git a/vegetationPhenology/testxmlreading.py b/vegetationPhenology/testxmlreading.py
index cc3a0377..6b620221 100644
--- a/vegetationPhenology/testxmlreading.py
+++ b/vegetationPhenology/testxmlreading.py
@@ -51,11 +51,11 @@ def createcsv_roi_polygon(coordinates):
return polygon_str
if __name__ == '__main__':
- xmlpath = r"E:\MicroWorkspace\GF3A_nanjing\input-ortho\test_shp\test.xml"
+ xmlpath = r"F:\MicroWorkspace\Micro\likun-GF3-VegetationP\vegTest.xml"
tree_obj = ET.parse(xmlpath)
csv_header = ['sar_img_name', 'phenology_id', 'phenology_name', 'roi_polygon']
- csvpath = r"E:\MicroWorkspace\GF3A_nanjing\input-ortho\test_shp\test.csv"
+ csvpath = r"F:\MicroWorkspace\Micro\likun-GF3-VegetationP\vegTest.csv"
# csvcreateTitile(csvpath,csv_header)
csvfile(csvpath,csv_header)
# get the list of Element objects matching the Region tag