diff --git a/Ortho/Ortho-S-SAR.xml b/Ortho/Ortho-S-SAR.xml index 010cb42..99eddfe 100644 --- a/Ortho/Ortho-S-SAR.xml +++ b/Ortho/Ortho-S-SAR.xml @@ -1,7 +1,7 @@ CSAR_202107275419_0001-0 - D:\613NET\ComputingNode\ftproot\Production\PL_20240507155658_0002\ + D:\micro\SWork\ File ElementAlg @@ -37,7 +37,7 @@ 无需求 - + SLC SLC元文件 @@ -45,7 +45,7 @@ File tar.gz Cal - F:\MicroWorkspace\S_SAR\yuan\HJ2E_KSC_STRIP_006199_E85.8_N44.2_20231124_SLC_HHHV_L10000135594.tar.gz + F:\MicroWorkspace\20240814tw\HJ2E_KRN_QPS_008852_E110.6_N20.1_20240515_SLC_AHV_L10000208028.tar.gz True False File @@ -60,7 +60,7 @@ File tif Cal - F:\MicroWorkspace\S_SAR\yuan\dem\dem_cut1.tif + F:\MicroWorkspace\20240814tw\dem\109E15N_COP30.tif True True File @@ -68,6 +68,25 @@ 0 DEM + + baseMap + 哨兵底图数据 + 同地区哨兵地图用于影像配准 + File + File + Cal + DEFAULT + DEFAULT + DEFAULT + + F:\MicroWorkspace\20240814tw\S1A + True + False + File + Aux + 0 + baseMap + CorrectMethod 选择校正方法 @@ -92,7 +111,7 @@ File tar.gz Cal - D:\613NET\ComputingNode\ftproot\Production\PL_20240507155658_0002\Ortho\Output\HJ2E_KSC_STRIP_006199_E85.8_N44.2_20231124_SLC_HHHV_L10000135594-Ortho.tar.gz + D:\micro\SWork\Ortho\Output\HJ2E_KRN_QPS_008852_E110.6_N20.1_20240515_SLC_AHV_L10000208028-Ortho.tar.gz DEFAULT DEFAULT DEFAULT diff --git a/Ortho/Ortho.rar b/Ortho/Ortho.rar deleted file mode 100644 index 181bdb5..0000000 Binary files a/Ortho/Ortho.rar and /dev/null differ diff --git a/Ortho/OrthoAlg.py b/Ortho/OrthoAlg.py index d365695..91b64d7 100644 --- a/Ortho/OrthoAlg.py +++ b/Ortho/OrthoAlg.py @@ -121,7 +121,57 @@ class ScatteringAlg: # db_arr[np.isinf(db_arr)] = -9999 ImageHandler.write_img(db_path, proj, geotrans, db_arr, -9999) + @staticmethod + def sar_backscattering_coef_RPC(in_sar_tif, meta_file_path, out_sar_tif, replece_VV=False, is_DB=True): + # 读取原始SAR影像 + proj, geotrans, in_data = ImageHandler.read_img(in_sar_tif) + + # 计算强度信息 + I = np.array(in_data[0], dtype="float32") + Q = np.array(in_data[1], dtype="float32") + + where_9999_0 = np.where(I == -9999) + where_9999_1 = np.where(Q == -9999) + I[where_9999_0] = 1.0 + Q[where_9999_1] = 1.0 + + I2 = np.square(I) + Q2 = np.square(Q) + intensity_arr = I2 + Q2 + + # 获取极化类型 + if 'HH' in os.path.basename(in_sar_tif): + polarization = 'HH' + elif 'HV' in os.path.basename(in_sar_tif): + polarization = 'HV' + elif 'VH' in os.path.basename(in_sar_tif): + polarization = 'VH' + elif 'VV' in os.path.basename(in_sar_tif): + polarization = 'VV' + if replece_VV: + polarization = 'HV' # 土壤水分算法中可能会用HV替换VV + elif 'DH' in os.path.basename(in_sar_tif): + polarization = 'HH' + else: + raise Exception('there are not HH、HV、VH、VV in path:', in_sar_tif) + + # 获取参数 + QualifyValue = MetaDataHandler.get_QualifyValue(meta_file_path, polarization) + # Kdb = MetaDataHandler.get_Kdb(meta_file_path, polarization) + Kdb = 0 + + # 计算后向散射系数 + # 对数形式 + coef_arr = 10 * (np.log10(intensity_arr * ((QualifyValue / 32767) ** 2))) - Kdb + coef_arr[np.isnan(coef_arr)] = 0 + coef_arr[np.isinf(coef_arr)] = 0 + coef_arr[where_9999_0] = 0 + coef_arr[where_9999_1] = 0 + ## 输出的SAR后向散射系数产品 + ImageHandler.write_img(out_sar_tif, proj, geotrans, coef_arr, 0) + + return True @@ -1008,6 +1058,34 @@ class DEMProcess(object): time.sleep(3) #gdal.CloseDir(out_DEM) return out_DEM + + @staticmethod + def bsMap_merged(in_bsMap_path, meta_file_path, out_bsMap_path): + ''' + DEM重采样函数,默认坐标系为WGS84 + agrs: + in_dem_path: 输入的DEM文件夹路径 + meta_file_path: 输入的xml元文件路径 + out_dem_path: 输出的DEM文件夹路径 + ''' + # 读取文件夹中所有的DEM + bsMap_file_paths = [os.path.join(in_bsMap_path, dem_name) for 
dem_name in os.listdir(in_bsMap_path) if + dem_name.find(".tif") >= 0 and dem_name.find(".tif.") == -1] + spatialreference = osr.SpatialReference() + spatialreference.SetWellKnownGeogCS("WGS84") # 设置地理坐标,单位为度 degree # 设置投影坐标,单位为度 degree + spatialproj = spatialreference.ExportToWkt() # 导出投影结果 + # 将DEM拼接成一张大图 + mergeFile = gdal.BuildVRT(os.path.join(out_bsMap_path, "mergedBsMap_VRT.tif"), bsMap_file_paths) + out_DEM = os.path.join(out_bsMap_path, "MergedBsMap.tif") + gdal.Warp(out_DEM, + mergeFile, + format="GTiff", + dstSRS=spatialproj, + dstNodata=-9999, + outputType=gdal.GDT_Float32) + time.sleep(3) + # gdal.CloseDir(out_DEM) + return out_DEM @staticmethod def dem_resampled(in_dem_path,out_dem_path,samling_f): in_dem=gdal.Open(in_dem_path,gdalconst.GA_ReadOnly) @@ -1659,6 +1737,12 @@ class IndirectOrthorectification(Orthorectification): print(os.system(exe_cmd)) print("==========================================================================") + def get_offset(self, baseMap, in_sar, in_sar_sigma): + exe = r".\baseTool\x64\calOffset\calOffset.exe" + exe_cmd = r"set PROJ_LIB=.\baseTool\x64\Release; & {0} {1} {2} {3}".format(exe, baseMap, in_sar, in_sar_sigma) + print(exe_cmd) + print(os.system(exe_cmd)) + print("==========================================================================") def lee_process_sar(self,in_sar, out_sar, win_size, noise_var): ''' diff --git a/Ortho/OrthoMain.py b/Ortho/OrthoMain.py index 9897045..9879f82 100644 --- a/Ortho/OrthoMain.py +++ b/Ortho/OrthoMain.py @@ -191,6 +191,7 @@ class OrthoMain: self.__workspace_Temporary_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary") self.__workspace_unpack_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", "unpack") self.__workspace_ResampledDEM_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'TestDEM') + self.__workspace_baseMap_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'baseMap') self.__workspace_LutImg_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'TestLut') self.__workspace_IncidenceImg_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'TestInc') self.__workspace_SimImg_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'TestSim') @@ -202,7 +203,8 @@ class OrthoMain: self.__workspace_unpack_path, self.__workspace_ResampledDEM_path, self.__workspace_LutImg_path, self.__workspace_IncidenceImg_path, self.__workspace_SimImg_path, self.__workspace_SARIntensity_path, - self.__workspace_package_path, self.__workspace_origin_path] + self.__workspace_package_path, self.__workspace_origin_path, + self.__workspace_baseMap_path] for path in path_list: if os.path.exists(path): @@ -391,7 +393,7 @@ class OrthoMain: if CorrectMethod.get('CorrectMethod') == '1' or CorrectMethod.get('CorrectMethod') == 1: logger.info("CorrectMethod is RPC!") - return self.RPC_process_handle() + return self.RD_process_handle() elif CorrectMethod.get('CorrectMethod') == '2' or CorrectMethod.get('CorrectMethod') == 2: logger.info("CorrectMethod is RD!") @@ -399,8 +401,6 @@ class OrthoMain: return self.RD_process_handle() else: raise Exception('No CorrectMethod') - - def RPC_process_handle(self): @@ -516,8 +516,16 @@ class OrthoMain: def cut_dem(self, dem_merged_path, meta_file_path): - left_up_lon = 0 - left_up_lat = 0 + _, scopes = DictXml(meta_file_path).get_extend() + intersect_polygon = pp().intersect_polygon(scopes) + if intersect_polygon is None: + raise Exception('cal intersect box fail!') + shp_path = 
os.path.join(self.__workspace_Temporary_path, 'IntersectPolygon.shp') + if pp().write_polygon_shp(shp_path, intersect_polygon, 4326) is False: + raise Exception('create intersect shp fail!') + dem_process = os.path.join(self.__workspace_Temporary_path, 'dem_cut.tif') + pp().cut_img(dem_process, dem_merged_path, shp_path) + return dem_process def process_sim_ori(self, sim_ori): @@ -534,7 +542,49 @@ class OrthoMain: pp().cut_img(sim_ori_process, sim_ori, shp_path) return sim_ori_process - + def correct_sim_ori(self, Orthorectification, slc_paths, bsMap_merged_path, out_dir_path): + # 对映射表进行校正 + sim_ori_tiff = out_dir_path + "\\" + "RD_sim_ori.tif" + out_sim_ori = out_dir_path + "\\" + "sim_ori-ortho.tif" + parameter_path = os.path.join(self.__workspace_package_path, "orth_para.txt") + in_tif_paths = list(glob.glob(os.path.join(slc_paths, '*.tiff'))) + out_rpc_db = os.path.join(self.__workspace_baseMap_path, 'rpc_line.tif') + alg.sar_backscattering_coef_RPC(in_tif_paths[0], self.__in_processing_paras['META'], out_rpc_db) + + db_tif_path = os.path.join(self.__workspace_baseMap_path, 'rpc_db_geo.tif') + + Orthorectification.calInterpolation_bil_Wgs84_rc_sar_sigma(parameter_path, sim_ori_tiff, out_rpc_db, + db_tif_path) + dataset = ImageHandler().get_dataset(db_tif_path) + baseMapCut = os.path.join(self.__workspace_baseMap_path, 'baseMapCut.tif') + inputCut = os.path.join(self.__workspace_baseMap_path, 'inputCut.tif') + baseMapResample = os.path.join(self.__workspace_baseMap_path, 'baseMapCut_Resample.tif') + shpCenterFile = os.path.join(self.__workspace_baseMap_path, 'shpCenter.shp') + center_scopes = (ImageHandler().get_center_scopes(dataset),) + intersect_polygon = pp().intersect_polygon(center_scopes) + if intersect_polygon is None: + raise Exception('create intersect shp fail!') + if pp().write_polygon_shp(shpCenterFile, intersect_polygon, 4326) is False: + raise Exception('create intersect shp fail!') + pp().cut_img(baseMapCut, bsMap_merged_path, shpCenterFile) + pp().cut_img(inputCut, db_tif_path, shpCenterFile) + pp().resampling_by_scale(baseMapCut, baseMapResample, inputCut) + in_sar_png = self.imageHandler.write_view(inputCut) + baseMap_png = self.imageHandler.write_view(baseMapResample) + Orthorectification.get_offset(in_sar_png, baseMap_png, inputCut) + off_txt = os.path.join(os.path.dirname(inputCut), 'off.txt') + with open(off_txt, 'r') as f: + data = f.readlines() + x = float(data[0]) + y = float(data[1]) + im_proj, im_geotrans, im_arr = self.imageHandler.read_img(sim_ori_tiff) + lon_new = im_geotrans[0] + x + lat_new = im_geotrans[3] - y + im_geosNew = [lon_new, im_geotrans[1], im_geotrans[2], lat_new, im_geotrans[4], im_geotrans[5]] + ImageHandler().write_img(out_sim_ori, im_proj, im_geosNew, im_arr) + os.remove(sim_ori_tiff) + return out_sim_ori + def RD_process_handle(self): # RPC logger.info(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')) @@ -546,7 +596,11 @@ class OrthoMain: out_dem_path = self.__workspace_ResampledDEM_path dem_merged_path=DEMProcess.dem_merged(in_dem_path, meta_file_path, out_dem_path) # 生成TestDEM\mergedDEM_VRT.tif - # self.cut_dem(dem_merged_path, meta_file_path) + bsMap = self.__in_processing_paras['baseMap'] + ortho_bsMap_path = self.__workspace_baseMap_path + bsMap_merged_path = DEMProcess.bsMap_merged(bsMap, meta_file_path, ortho_bsMap_path) + + dem_path = self.cut_dem(dem_merged_path, meta_file_path) # 2、间接定位法求解行列坐标 slc_paths = self.__in_processing_paras["SLC"] # 2.1 生成映射表 @@ -565,8 +619,9 @@ class OrthoMain: 
in_slc_path=os.path.join(slc_paths,slc_path) break # 获取校正模型后 - Orthorectification.preCaldem_sar_rc(dem_merged_path,in_slc_path,self.__workspace_Temporary_path,self.__workspace_package_path.replace("\\","\\\\")) # 初步筛选坐标范围 - + Orthorectification.preCaldem_sar_rc(dem_path,in_slc_path,self.__workspace_Temporary_path,self.__workspace_package_path.replace("\\","\\\\")) # 初步筛选坐标范围 + out_dir_path = self.__workspace_package_path.replace("\\", "\\\\") + sim_ori_rpc = self.correct_sim_ori(Orthorectification, slc_paths, bsMap_merged_path, out_dir_path) logger.info('progress bar: 40%') # clip_dem_reample_path=os.path.join(self.__workspace_Temporary_path, "SAR_dem.tiff") # infooption=gdal.InfoOptions("-json") @@ -574,7 +629,7 @@ class OrthoMain: # dem_merged_info=gdal.Info(dem_merged_path,options=infooption) # sampling_f=clip_dem_tif_info['size'][0]/dem_merged_info['size'][0] - out_dir_path=self.__workspace_package_path.replace("\\","\\\\") + this_outSpace_path = out_dir_path this_out_dem_slantRange_path = out_dir_path + "\\" + "dem_slantRange.tiff"#// 地形斜距 this_out_plant_slantRange_path = out_dir_path + "\\" + "flat_slantRange.tiff"#// 平地斜距 @@ -619,10 +674,10 @@ class OrthoMain: this_out_ori_sim_tiff = out_dir_path + "\\" + "RD_ori_sim.tif"#// 局地入射角 if (os.path.exists(this_out_ori_sim_tiff)): shutil.move(this_out_ori_sim_tiff, out_dir_path + "\\" + "ori_sim-ortho.tif") - - this_out_sim_ori_tiff = out_dir_path + "\\" + "RD_sim_ori.tif" # // 局地入射角 - if (os.path.exists(this_out_sim_ori_tiff)): - shutil.move(this_out_sim_ori_tiff, out_dir_path + "\\" + "sim_ori-ortho.tif") + # + # this_out_sim_ori_tiff = out_dir_path + "\\" + "RD_sim_ori.tif" # // 局地入射角 + # if (os.path.exists(this_out_sim_ori_tiff)): + # shutil.move(this_out_sim_ori_tiff, out_dir_path + "\\" + "sim_ori-ortho.tif") # GTC 入射角 GTC_rc_path=os.path.join(self.__workspace_package_path,"ori_sim-ortho.tif") diff --git a/backScattering/BackScattering-S-SAR.xml b/backScattering/BackScattering-S-SAR.xml index 6bcbc6d..e872818 100644 --- a/backScattering/BackScattering-S-SAR.xml +++ b/backScattering/BackScattering-S-SAR.xml @@ -1,7 +1,7 @@ CSAR_202107275419_0001-0 - D:\613NET\ComputingNode\ftproot\Production\PL_20240507155658_0002\ + D:\micro\SWork\ File ElementAlg @@ -34,7 +34,7 @@ 无需求 - + SLC SLC影像文件 @@ -42,7 +42,7 @@ File tar.gz Cal - F:\MicroWorkspace\S_SAR\HJ2E_MYC_STRIP_008031_E110.8_N19.8_20240323_SLC_HHHV_L10000185601.tar.gz + F:\MicroWorkspace\20240826Ortho\HJ2E_KSC_QPS_008535_E116.5_N43.9_20240425_SLC_AHV_L10000199516.tar.gz True False File @@ -55,9 +55,9 @@ DEM数字高程影像 30m分辨率DEM数字高程影像 File - tif + File Cal - F:\MicroWorkspace\COPDEM\COPDEM_Int16\109E15N_COP30.tif + F:\MicroWorkspace\20240826Ortho\dem True True File @@ -65,6 +65,25 @@ 0 DEM + + baseMap + 哨兵底图数据 + 同地区哨兵地图用于影像配准 + File + File + Cal + DEFAULT + DEFAULT + DEFAULT + + F:\MicroWorkspace\20240826Ortho\S1GBM + True + False + File + Aux + 0 + baseMap + @@ -74,7 +93,7 @@ File tar.gz Cal - D:\613NET\ComputingNode\ftproot\Production\PL_20240507155658_0002\BackScattering\Output\HJ2E_MYC_STRIP_008031_E110.8_N19.8_20240323_SLC_HHHV_L10000185601-cal.tar.gz + D:\micro\SWork\BackScattering\Output\HJ2E_KSC_QPS_008535_E116.5_N43.9_20240425_SLC_AHV_L10000199516-cal.tar.gz DEFAULT DEFAULT DEFAULT diff --git a/backScattering/BackScatteringAlg.py b/backScattering/BackScatteringAlg.py index 0786f99..37b4dcd 100644 --- a/backScattering/BackScatteringAlg.py +++ b/backScattering/BackScatteringAlg.py @@ -115,6 +115,58 @@ class ScatteringAlg: ImageHandler.write_img(out_sar_tif, proj, geotrans, tif_array, 
0) return True + @staticmethod + def sar_backscattering_coef_RPC(in_sar_tif, meta_file_path, out_sar_tif, replece_VV=False, is_DB=True): + + # 读取原始SAR影像 + proj, geotrans, in_data = ImageHandler.read_img(in_sar_tif) + + # 计算强度信息 + I = np.array(in_data[0], dtype="float32") + Q = np.array(in_data[1], dtype="float32") + + where_9999_0 = np.where(I == -9999) + where_9999_1 = np.where(Q == -9999) + I[where_9999_0] = 1.0 + Q[where_9999_1] = 1.0 + + I2 = np.square(I) + Q2 = np.square(Q) + intensity_arr = I2 + Q2 + + # 获取极化类型 + if 'HH' in os.path.basename(in_sar_tif): + polarization = 'HH' + elif 'HV' in os.path.basename(in_sar_tif): + polarization = 'HV' + elif 'VH' in os.path.basename(in_sar_tif): + polarization = 'VH' + elif 'VV' in os.path.basename(in_sar_tif): + polarization = 'VV' + if replece_VV: + polarization = 'HV' # 土壤水分算法中可能会用HV替换VV + elif 'DH' in os.path.basename(in_sar_tif): + polarization = 'HH' + else: + raise Exception('there are not HH、HV、VH、VV in path:', in_sar_tif) + + # 获取参数 + QualifyValue = MetaDataHandler.get_QualifyValue(meta_file_path, polarization) + # Kdb = MetaDataHandler.get_Kdb(meta_file_path, polarization) + Kdb = 0 + + # 计算后向散射系数 + # 对数形式 + coef_arr = 10 * (np.log10(intensity_arr * ((QualifyValue / 32767) ** 2))) - Kdb + coef_arr[np.isnan(coef_arr)] = 0 + coef_arr[np.isinf(coef_arr)] = 0 + coef_arr[where_9999_0] = 0 + coef_arr[where_9999_1] = 0 + ## 输出的SAR后向散射系数产品 + ImageHandler.write_img(out_sar_tif, proj, geotrans, coef_arr, 0) + + return True + @staticmethod def lin_to_db(lin_path, db_path): proj, geotrans, in_data = ImageHandler.read_img(lin_path) @@ -1023,6 +1075,34 @@ class DEMProcess(object): time.sleep(3) #gdal.CloseDir(out_DEM) return out_DEM + + @staticmethod + def bsMap_merged(in_bsMap_path, meta_file_path, out_bsMap_path): + ''' + DEM重采样函数,默认坐标系为WGS84 + agrs: + in_dem_path: 输入的DEM文件夹路径 + meta_file_path: 输入的xml元文件路径 + out_dem_path: 输出的DEM文件夹路径 + ''' + # 读取文件夹中所有的DEM + bsMap_file_paths = [os.path.join(in_bsMap_path, dem_name) for dem_name in os.listdir(in_bsMap_path) if + dem_name.find(".tif") >= 0 and dem_name.find(".tif.") == -1] + spatialreference = osr.SpatialReference() + spatialreference.SetWellKnownGeogCS("WGS84") # 设置地理坐标,单位为度 degree # 设置投影坐标,单位为度 degree + spatialproj = spatialreference.ExportToWkt() # 导出投影结果 + # 将DEM拼接成一张大图 + mergeFile = gdal.BuildVRT(os.path.join(out_bsMap_path, "mergedBsMap_VRT.tif"), bsMap_file_paths) + out_DEM = os.path.join(out_bsMap_path, "MergedBsMap.tif") + gdal.Warp(out_DEM, + mergeFile, + format="GTiff", + dstSRS=spatialproj, + dstNodata=-9999, + outputType=gdal.GDT_Float32) + time.sleep(3) + # gdal.CloseDir(out_DEM) + return out_DEM @staticmethod def dem_resampled(in_dem_path,out_dem_path,samling_f): in_dem=gdal.Open(in_dem_path,gdalconst.GA_ReadOnly) @@ -1644,6 +1724,13 @@ class IndirectOrthorectification(Orthorectification): print(os.system(exe_cmd)) print("==========================================================================") + def get_offset(self, baseMap, in_sar, in_sar_sigma): + exe = r".\baseTool\x64\calOffset\calOffset.exe" + exe_cmd = r"set PROJ_LIB=.\baseTool\x64\Release; & {0} {1} {2} {3}".format(exe, baseMap, in_sar, in_sar_sigma) + print(exe_cmd) + print(os.system(exe_cmd)) + print("==========================================================================") + def calInterpolation_bil_Wgs84_rc_sar_sigma(self, parameter_path, dem_rc, in_sar, out_sar): ''' # std::cout << "mode 11"; diff --git a/backScattering/BackScatteringMain.py b/backScattering/BackScatteringMain.py index 5270160..d883ffc 100644 --- 
a/backScattering/BackScatteringMain.py +++ b/backScattering/BackScatteringMain.py @@ -209,8 +209,10 @@ class ScatteringMain: self.__workspace_preprocessed_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", "preprocessed") # self.__workspace_path + EXE_NAME + r"\Temporary\preprocessed""\\" self.__workspace_processing_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary","processing\\") #self.__workspace_path + EXE_NAME + r"\Temporary\processing""\\" self.__workspace_origin_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", "origin") + self.__workspace_baseMap_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'baseMap') path_list = [self.__workspace_preprocessing_path, self.__workspace_preprocessed_path, - self.__workspace_processing_path, self.__workspace_origin_path] + self.__workspace_processing_path, self.__workspace_origin_path, + self.__workspace_baseMap_path] for path in path_list: if os.path.exists(path): @@ -282,6 +284,61 @@ class ScatteringMain: pp().cut_img(sim_ori_process, sim_ori, shp_path) return sim_ori_process + def cut_dem(self, dem_merged_path, meta_file_path): + _, scopes = DictXml(meta_file_path).get_extend() + intersect_polygon = pp().intersect_polygon(scopes) + if intersect_polygon is None: + raise Exception('cal intersect box fail!') + shp_path = os.path.join(self.__workspace_preprocessing_path, 'IntersectPolygon.shp') + if pp().write_polygon_shp(shp_path, intersect_polygon, 4326) is False: + raise Exception('create intersect shp fail!') + dem_process = os.path.join(self.__workspace_preprocessing_path, 'dem_cut.tif') + pp().cut_img(dem_process, dem_merged_path, shp_path) + return dem_process + + def correct_sim_ori(self, Orthorectification, slc_paths, bsMap_merged_path, out_dir_path): + # 对映射表进行校正 + sim_ori_tiff = out_dir_path + "\\" + "RD_sim_ori.tif" + out_sim_ori = out_dir_path + "\\" + "sim_ori-ortho.tif" + parameter_path = os.path.join(self.__workspace_processing_path, "orth_para.txt") + in_tif_paths = list(glob.glob(os.path.join(slc_paths, '*.tiff'))) + out_rpc_db = os.path.join(self.__workspace_baseMap_path, 'rpc_line.tif') + alg.sar_backscattering_coef_RPC(in_tif_paths[0], self.__in_processing_paras['META'], out_rpc_db) + + db_tif_path = os.path.join(self.__workspace_baseMap_path, 'rpc_db_geo.tif') + + Orthorectification.calInterpolation_bil_Wgs84_rc_sar_sigma(parameter_path, sim_ori_tiff, out_rpc_db, + db_tif_path) + dataset = ImageHandler().get_dataset(db_tif_path) + baseMapCut = os.path.join(self.__workspace_baseMap_path, 'baseMapCut.tif') + inputCut = os.path.join(self.__workspace_baseMap_path, 'inputCut.tif') + baseMapResample = os.path.join(self.__workspace_baseMap_path, 'baseMapCut_Resample.tif') + shpCenterFile = os.path.join(self.__workspace_baseMap_path, 'shpCenter.shp') + center_scopes = (ImageHandler().get_center_scopes(dataset),) + intersect_polygon = pp().intersect_polygon(center_scopes) + if intersect_polygon is None: + raise Exception('create intersect shp fail!') + if pp().write_polygon_shp(shpCenterFile, intersect_polygon, 4326) is False: + raise Exception('create intersect shp fail!') + pp().cut_img(baseMapCut, bsMap_merged_path, shpCenterFile) + pp().cut_img(inputCut, db_tif_path, shpCenterFile) + pp().resampling_by_scale(baseMapCut, baseMapResample, inputCut) + in_sar_png = self.imageHandler.write_view(inputCut) + baseMap_png = self.imageHandler.write_view(baseMapResample) + Orthorectification.get_offset(in_sar_png,baseMap_png, inputCut) + off_txt = 
os.path.join(os.path.dirname(inputCut), 'off.txt') + with open(off_txt, 'r') as f: + data = f.readlines() + x = float(data[0]) + y = float(data[1]) + im_proj, im_geotrans, im_arr = self.imageHandler.read_img(sim_ori_tiff) + lon_new = im_geotrans[0] + x + lat_new = im_geotrans[3] - y + im_geosNew = [lon_new, im_geotrans[1], im_geotrans[2], lat_new, im_geotrans[4], im_geotrans[5]] + ImageHandler().write_img(out_sim_ori, im_proj, im_geosNew, im_arr) + os.remove(sim_ori_tiff) + return out_sim_ori + def process_handle(self,start): in_tif_paths = list(glob.glob(os.path.join(self.__in_processing_paras['SLC'], '*.tif'))) if in_tif_paths == []: @@ -320,6 +377,11 @@ class ScatteringMain: meta_file_path = self.__in_processing_paras['META'] # .meta文件路径 out_dem_path = self.__workspace_preprocessing_path dem_merged_path=DEMProcess.dem_merged(in_dem_path, meta_file_path, out_dem_path) # 生成TestDEM\mergedDEM_VRT.tif + dem_path = self.cut_dem(dem_merged_path, meta_file_path) + + bsMap = self.__in_processing_paras['baseMap'] + ortho_bsMap_path = self.__workspace_baseMap_path + bsMap_merged_path = DEMProcess.bsMap_merged(bsMap, meta_file_path, ortho_bsMap_path) in_slc_path=None for slc_path in in_tif_paths: @@ -328,7 +390,10 @@ class ScatteringMain: break # 获取校正模型后 - Orthorectification.preCaldem_sar_rc(dem_merged_path,in_slc_path,self.__workspace_preprocessing_path,self.__workspace_processing_path.replace("\\","\\\\")) # 初步筛选坐标范围 + Orthorectification.preCaldem_sar_rc(dem_path,in_slc_path,self.__workspace_preprocessing_path,self.__workspace_processing_path.replace("\\","\\\\")) # 初步筛选坐标范围 + out_dir_path = self.__workspace_processing_path.replace("\\", "\\\\") + sim_ori_rpc = self.correct_sim_ori(Orthorectification, self.__in_processing_paras['SLC'], bsMap_merged_path, + out_dir_path) logger.info('progress bar: 40%') # clip_dem_reample_path=os.path.join(self.__workspace_preprocessing_path, "SAR_dem.tiff") # infooption=gdal.InfoOptions("-json") @@ -336,7 +401,6 @@ class ScatteringMain: # dem_merged_info=gdal.Info(dem_merged_path,options=infooption) # sampling_f=clip_dem_tif_info['size'][0]/dem_merged_info['size'][0] # 处理RD 的结果 - out_dir_path=self.__workspace_processing_path.replace("\\","\\\\") this_outSpace_path = out_dir_path this_out_dem_slantRange_path = os.path.join(out_dir_path, "dem_slantRange.tiff") # out_dir_path + "\\" + "dem_slantRange.tiff"#// 地形斜距 this_out_plant_slantRange_path = os.path.join(out_dir_path, "flat_slantRange.tiff") # out_dir_path + "\\" + "flat_slantRange.tiff"#// 平地斜距 @@ -408,7 +472,7 @@ class ScatteringMain: # out_tif_path, # lin_tif_path) - Orthorectification.calInterpolation_bil_Wgs84_rc_sar_sigma(parameter_path, this_in_rpc_x_y_path, + Orthorectification.calInterpolation_bil_Wgs84_rc_sar_sigma(parameter_path, sim_ori_rpc, out_tif_path, lin_tif_path) tempout_tif_path = os.path.join(self.__workspace_processing_path, diff --git a/backScattering/backScattering.rar b/backScattering/backScattering.rar deleted file mode 100644 index 53ca316..0000000 Binary files a/backScattering/backScattering.rar and /dev/null differ diff --git a/landcover-S-SAR/LandCover-S-SAR.xml b/landcover-S-SAR/LandCover-S-SAR.xml index 4c6bfb3..38dfcf6 100644 --- a/landcover-S-SAR/LandCover-S-SAR.xml +++ b/landcover-S-SAR/LandCover-S-SAR.xml @@ -1,7 +1,7 @@ CSAR_202107275419_0001-0 - D:\613NET\ComputingNode\ftproot\Production\PL_20240507155658_0002\ + D:\micro\SWork\ File ElementAlg @@ -38,7 +38,7 @@ File tar.gz Man - 
F:\MicroWorkspace\S_SAR\AHV\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458-Ortho.tar.gz + F:\MicroWorkspace\20240814tw\HJ2E_KRN_QPS_008852_E110.6_N20.1_20240515_SLC_AHV_L10000208028-Ortho.tar.gz True False File @@ -51,9 +51,9 @@ 标记数据 标记的样本数据 File - zip + csv Man - F:\al_zhongji\S-SAR-data\landCover\SSAR_landcover_landaCoverSample.zip + F:\MicroWorkspace\20240814tw\LandCover.csv True True UploadInput @@ -81,9 +81,8 @@ 极化特征组合 可选极化特征组合一、共14种特征(编号依次为0-13) Freeman:表面散射p_s(0)、偶次散射p_d(1)、体散射p_v(2); - Touzi:散射角α_s(3)、散射相位ϕ_α(4)、目标散射对称度τ(5)、相对能量λ_i(6); - Yamaguchi:表面散射f_s(7)、二次散射f_d(8)、体散射f_v(9)、螺旋体散射f_h(10); - Cloude-Pottier:分解散射熵H(11)、反熵A(12)、平均散射角α(13) + Yamaguchi:表面散射f_s(3)、二次散射f_d(4)、体散射f_v(5)、螺旋体散射f_h(6); + Cloude-Pottier:分解散射熵H(7)、反熵A(8)、平均散射角α(9) Value string Man @@ -104,7 +103,7 @@ File tar.gz Man - D:\613NET\ComputingNode\ftproot\Production\PL_20240507155658_0002\LandCover\Output\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458-Ortho-LANDCLASS.tar.gz + D:\micro\SWork\LandCover\Output\HJ2E_KRN_QPS_008852_E110.6_N20.1_20240515_SLC_AHV_L10000208028-Ortho-LANDCLASS.tar.gz diff --git a/landcover-S-SAR/LandCoverAuxData.py b/landcover-S-SAR/LandCoverAuxData.py index 4cec7c8..fadcc19 100644 --- a/landcover-S-SAR/LandCoverAuxData.py +++ b/landcover-S-SAR/LandCoverAuxData.py @@ -122,10 +122,17 @@ class LandCoverMeasCsv: # raise Exception('there are empty data!', train_data) if len(train_data_list) <= 1: raise Exception('there is only one label type!', train_data_list) - + num_list = [] + for train_data in train_data_list: + if not len(train_data[3]) == 0: + num_list.append(len(train_data[3])) + try: + max_num = np.min(num_list) + except Exception as e: + print(e) for train_data in train_data_list: logger.info(str(train_data[0]) + "," + str(train_data[2]) +"," + "num:" + str(len(train_data[3]))) - max_num = self.__max_tran__num_per_class + # max_num = self.__max_tran__num_per_class logger.info("max number =" + str(max_num) + ", random select" + str(max_num) + " point as train data!") if (len(train_data[3]) > max_num): train_data[3] = random.sample(train_data[3], max_num) diff --git a/landcover-S-SAR/LandCoverMain.py b/landcover-S-SAR/LandCoverMain.py index a6677c8..fbb61bf 100644 --- a/landcover-S-SAR/LandCoverMain.py +++ b/landcover-S-SAR/LandCoverMain.py @@ -289,16 +289,11 @@ class LandCoverMain: else: self.__FeatureInput['Freeman'] = [i] elif 3 <= i < 7: - if 'Touzi' in self.__FeatureInput: - self.__FeatureInput['Touzi'].append(i) - else: - self.__FeatureInput['Touzi'] = [i] - elif 7 <= i < 11: if 'Yamaguchi' in self.__FeatureInput: self.__FeatureInput['Yamaguchi'].append(i) else: self.__FeatureInput['Yamaguchi'] = [i] - elif 11 <= i < 14: + elif 7 <= i < 10: if 'Cloude' in self.__FeatureInput: self.__FeatureInput['Cloude'].append(i) else: @@ -308,24 +303,19 @@ class LandCoverMain: def __FeatureParaInit(self): self.__FeatureMap["Freeman"] = [0, 1, 2] - self.__FeatureMap["Touzi"] = [3, 4, 5, 6] - self.__FeatureMap["Yamaguchi"] = [7, 8, 9, 10] - self.__FeatureMap["Cloude"] = [11, 12, 13] + self.__FeatureMap["Yamaguchi"] = [3, 4, 5, 6] + self.__FeatureMap["Cloude"] = [7, 8, 9] self.___FeatureFileNameMap[0] = ['Freeman', "Freeman_Odd.bin"] self.___FeatureFileNameMap[1] = ['Freeman', "Freeman_Dbl.bin"] self.___FeatureFileNameMap[2] = ['Freeman', "Freeman_Vol.bin"] - self.___FeatureFileNameMap[3] = ['Touzi', "alpha.bin"] - self.___FeatureFileNameMap[4] = ['Touzi', "phi.bin"] - self.___FeatureFileNameMap[5] = ['Touzi', "tau.bin"] - self.___FeatureFileNameMap[6] = ['Touzi', 
"psi.bin"] - self.___FeatureFileNameMap[7] = ['Yamaguchi', "Yamaguchi4_Odd.bin"] - self.___FeatureFileNameMap[8] = ['Yamaguchi', "Yamaguchi4_Dbl.bin"] - self.___FeatureFileNameMap[9] = ['Yamaguchi', "Yamaguchi4_Vol.bin"] - self.___FeatureFileNameMap[10] = ['Yamaguchi', "Yamaguchi4_Hlx.bin"] - self.___FeatureFileNameMap[11] = ['Cloude', "anisotropy.bin"] - self.___FeatureFileNameMap[12] = ['Cloude', "entropy.bin"] - self.___FeatureFileNameMap[13] = ['Cloude', "alpha.bin"] + self.___FeatureFileNameMap[3] = ['Yamaguchi', "Yamaguchi4_Odd.bin"] + self.___FeatureFileNameMap[4] = ['Yamaguchi', "Yamaguchi4_Dbl.bin"] + self.___FeatureFileNameMap[5] = ['Yamaguchi', "Yamaguchi4_Vol.bin"] + self.___FeatureFileNameMap[6] = ['Yamaguchi', "Yamaguchi4_Hlx.bin"] + self.___FeatureFileNameMap[7] = ['Cloude', "anisotropy.bin"] + self.___FeatureFileNameMap[8] = ['Cloude', "entropy.bin"] + self.___FeatureFileNameMap[9] = ['Cloude', "alpha.bin"] def create_roi(self, img_path): """ @@ -515,11 +505,11 @@ class LandCoverMain: train_data_list = pm.api_read_measure() train_data_dic = csvh.trans_landCover_list2dic(train_data_list) - csvh_roi = csvHandle(self.__rows_geo, self.__cols_geo) + # csvh_roi = csvHandle(self.__rows_geo, self.__cols_geo) # train_data_dic = csvh_roi.trans_landCover_measuredata_dic(csvh_roi.readcsv(self.__processing_paras['LabelData']), self.__preprocessed_paras['ori_sim'], MAX_TRAN_NUM) - label_img = csvh_roi.get_roi_img() - if(len(label_img) != 0): - self.imageHandler.write_img(os.path.join(self.__workspace_processing_path, "label_img.tif"),"",[0,0,0,0,0,0],label_img) + # label_img = csvh_roi.get_roi_img() + # if(len(label_img) != 0): + # self.imageHandler.write_img(os.path.join(self.__workspace_processing_path, "label_img.tif"),"",[0,0,0,0,0,0],label_img) logger.info("read csv data success!") logger.info('progress bar: 20%') @@ -550,7 +540,7 @@ class LandCoverMain: logger.info('progress bar: 50%') # 生成最优特征子集训练集 - X_train, Y_train, optimal_feature = ml.gene_optimal_train_set(train_data_dic, feature_geo, 0.07, 0.85) + X_train, Y_train, optimal_feature = ml.gene_optimal_train_set(train_data_dic, feature_geo, 0.5, 0.85) # 训练模型 # cost = self.__processing_paras["Cost"] diff --git a/soilMoisture-S-SAR/SoilMoisture.xml b/soilMoisture-S-SAR/SoilMoisture.xml index 1bfca2c..0cddc1e 100644 --- a/soilMoisture-S-SAR/SoilMoisture.xml +++ b/soilMoisture-S-SAR/SoilMoisture.xml @@ -1,7 +1,7 @@ CSAR_202107275419_0001-0 - D:\613NET\ComputingNode\ftproot\Production\PL_20240507155658_0002\ + D:\micro\SWork\ File ElementAlg @@ -40,7 +40,7 @@ DEFAULT Cal - F:\MicroWorkspace\S_SAR\AHV\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458-cal.tar.gz + F:\MicroWorkspace\20240814tw\HJ2E_KRN_QPS_008852_E110.6_N20.1_20240515_SLC_AHV_L10000208028-cal.tar.gz True False File @@ -53,12 +53,12 @@ 地表覆盖类型数据 经过地理定标(WGS84)的地表覆盖类型数据 File - zip + tif DEFAULT DEFAULT DEFAULT Cal - F:\al_zhongji\S-SAR-data\soilMoisture\SSAR_soilMoisture_landCover.zip + F:\MicroWorkspace\20240814tw\global30.tif True False UploadTable @@ -74,7 +74,7 @@ Value string Man - empty + 10;20;30;40;70;90 DEFAULT DEFAULT DEFAULT @@ -90,12 +90,12 @@ NDVI数据 经过地理定标(WGS84)的NDVI数据 File - zip + tif DEFAULT DEFAULT DEFAULT Cal - F:\al_zhongji\S-SAR-data\soilMoisture\SSAR_soilMoisture_NDVI.zip + F:\MicroWorkspace\20240814tw\S2_NDVImed.tif True False UploadTable @@ -144,12 +144,12 @@ NDWI数据 经过地理定标(WGS84)的NDWI数据 File - zip + tif DEFAULT DEFAULT DEFAULT Cal - F:\al_zhongji\S-SAR-data\soilMoisture\SSAR_soilMoisture_NDWI.zip + 
F:\MicroWorkspace\20240814tw\S2_NDWImed.tif True False UploadTable @@ -164,7 +164,7 @@ Value float Man - -22.482554048434324 + -6.765447191190488 True False UploadInput @@ -182,7 +182,7 @@ Value float Man - -10.72946251632336 + 6.235438228712402 True False UploadInput @@ -200,7 +200,7 @@ Value float Man - -0.08238130673792357 + 861.6172697366004 True False UploadInput @@ -218,7 +218,7 @@ Value float Man - 1.0194495140476119 + -0.005217401735104278 True False UploadInput @@ -236,7 +236,7 @@ Value float Man - 6.107713980885245 + -29.78633099522965 True False UploadInput @@ -254,7 +254,7 @@ Value float Man - -4.496951628949385 + 23.39131797361224 True False UploadInput @@ -274,7 +274,7 @@ File tar.gz Man - D:\613NET\ComputingNode\ftproot\Production\PL_20240507155658_0002\SoilMoisture\Output\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458-cal-SMC.tar.gz + D:\micro\SWork\SoilMoisture\Output\HJ2E_KRN_QPS_008852_E110.6_N20.1_20240515_SLC_AHV_L10000208028-cal-SMC.tar.gz DEFAULT DEFAULT DEFAULT diff --git a/soilMoisture-S-SAR/SoilMoistureMain.py b/soilMoisture-S-SAR/SoilMoistureMain.py index 7b6f2b5..b638cae 100644 --- a/soilMoisture-S-SAR/SoilMoistureMain.py +++ b/soilMoisture-S-SAR/SoilMoistureMain.py @@ -382,8 +382,10 @@ class MoistureMain: logger.info('cal soil_moisture success!') proj, geos, data = self.imageHandler.read_img(product_path) - data[data < soil_moisture_value_min] = soil_moisture_value_min - data[data > soil_moisture_value_max] = soil_moisture_value_max + # data[data < soil_moisture_value_min] = soil_moisture_value_min + # data[data > soil_moisture_value_max] = soil_moisture_value_max + data[data < soil_moisture_value_min] = -9999 + data[data > soil_moisture_value_max] = -9999 self.imageHandler.write_img(product_path, proj, geos, data) # 生成快视图 diff --git a/tool/algorithm/image/ImageHandle.py b/tool/algorithm/image/ImageHandle.py index 818209d..82b3d8e 100644 --- a/tool/algorithm/image/ImageHandle.py +++ b/tool/algorithm/image/ImageHandle.py @@ -677,9 +677,85 @@ class ImageHandler: scopes = [[min_lon, max_lat], [max_lon, max_lat], [min_lon, min_lat], [max_lon, min_lat]] return scopes + def get_center_scopes(self, dataset): + if dataset is None: + return None + + img_geotrans = dataset.GetGeoTransform() + if img_geotrans is None: + return None + + width = dataset.RasterXSize # 栅格矩阵的列数 + height = dataset.RasterYSize # 栅格矩阵的行数 + + x_split = int(width/5) + y_split = int(height/5) + img_col_start = x_split * 1 + img_col_end = x_split * 3 + img_row_start = y_split * 1 + img_row_end = y_split *3 + + point_upleft = self.trans_rowcol2geo(img_geotrans, img_col_start, img_row_start) + point_upright = self.trans_rowcol2geo(img_geotrans, img_col_end, img_row_start) + point_downleft = self.trans_rowcol2geo(img_geotrans, img_col_start, img_row_end) + point_downright = self.trans_rowcol2geo(img_geotrans, img_col_end, img_row_end) + + return [point_upleft, point_upright, point_downleft, point_downright] + def write_view(self, tif_path, color_img=False, quick_view_path=None): + """ + 生成快视图,默认快视图和影像同路径且同名 + :param tif_path:影像路径 + :param color_img:是否生成随机伪彩色图 + :param quick_view_path:快视图路径 + """ + if quick_view_path is None: + quick_view_path = os.path.splitext(tif_path)[0]+'.jpg' + + n = self.get_bands(tif_path) + if n == 1: # 单波段 + t_data = self.get_data(tif_path) + else: # 多波段,转为强度数据 + t_data = self.get_data(tif_path) + t_data = t_data.astype(float) + t_data = np.sqrt(t_data[0] ** 2 + t_data[1] ** 2) + t_data[np.isnan(t_data)] = 0 + t_data[np.where(t_data == -9999)] = 0 + t_r = 
self.get_img_height(tif_path) + t_c = self.get_img_width(tif_path) + q_r = t_r + q_c = t_c + + if color_img is True: + # 生成伪彩色图 + img = np.zeros((t_r, t_c, 3), dtype=np.uint8) # (高,宽,维度) + u = np.unique(t_data) + for i in u: + if i != 0: + w = np.where(t_data == i) + img[w[0], w[1], 0] = np.random.randint(0, 255) # 随机生成一个0到255之间的整数 可以通过挑参数设定不同的颜色范围 + img[w[0], w[1], 1] = np.random.randint(0, 255) + img[w[0], w[1], 2] = np.random.randint(0, 255) + + img = cv2.resize(img, (q_c, q_r)) # (宽,高) + cv2.imwrite(quick_view_path, img) + # cv2.imshow("result4", img) + # cv2.waitKey(0) + else: + # 灰度图 + min = np.percentile(t_data, 2) # np.nanmin(t_data) + max = np.percentile(t_data, 98) # np.nanmax(t_data) + # if (max - min) < 256: + t_data = (t_data - min) / (max - min) * 255 + out_img = Image.fromarray(t_data) + out_img = out_img.resize((q_c, q_r)) # 重采样 + out_img = out_img.convert("L") # 转换成灰度图 + out_img.save(quick_view_path) + + return quick_view_path + if __name__ == '__main__': - fn = r"D:\micro\SWork\Ortho\Output\HJ2E_KSC_STRIP_006199_E85.8_N44.2_20231124_SLC_HHHV_L10000135594-Ortho\HJ2E_KSC_STRIP_006199_E85.8_N44.2_20231124_SLC_h_h_L10000135594-Ortho.tif" + fn = r"C:\Users\sxwcc\Downloads\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458-cal-SMC\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458-cal-SMC.tif" ImageHandler().write_quick_view(fn) # path = r'D:\BaiduNetdiskDownload\GZ\lon.rdr' # path2 = r'D:\BaiduNetdiskDownload\GZ\lat.rdr' diff --git a/tool/algorithm/transforml1a/transHandle.py b/tool/algorithm/transforml1a/transHandle.py index d857f2a..64ad4be 100644 --- a/tool/algorithm/transforml1a/transHandle.py +++ b/tool/algorithm/transforml1a/transHandle.py @@ -147,8 +147,10 @@ class TransImgL1A: # 处理最大最小范围 xmin = 0 if 0 > xmin else xmin ymin = 0 if 0 > ymin else ymin - xmax = ori2geo_img_width if ori2geo_img_width > xmax else xmax - ymax = ori2geo_img_height if ori2geo_img_height > ymax else ymax + # xmax = ori2geo_img_width if ori2geo_img_width > xmax else xmax + # ymax = ori2geo_img_height if ori2geo_img_height > ymax else ymax + xmax = xmax if ori2geo_img_width > xmax else ori2geo_img_width + ymax = ymax if ori2geo_img_height > ymax else ori2geo_img_height # 判断条件 xmax = xmax + 1 if xmax == xmin else xmax diff --git a/tool/algorithm/xml/AnalysisXml.py b/tool/algorithm/xml/AnalysisXml.py index 2dada82..5910cc5 100644 --- a/tool/algorithm/xml/AnalysisXml.py +++ b/tool/algorithm/xml/AnalysisXml.py @@ -45,14 +45,14 @@ class DictXml: point_downright = [float(bottomRight.find("longitude").text), float(bottomRight.find("latitude").text)] scopes = [point_upleft, point_upright, point_downleft, point_downright] - point_upleft_buf = [float(topLeft.find("longitude").text) - 0.5, float(topLeft.find("latitude").text) + 0.5] - point_upright_buf = [float(topRight.find("longitude").text) + 0.5, float(topRight.find("latitude").text) + 0.5] - point_downleft_buf = [float(bottomLeft.find("longitude").text) - 0.5, - float(bottomLeft.find("latitude").text) - 0.5] - point_downright_buf = [float(bottomRight.find("longitude").text) + 0.5, - float(bottomRight.find("latitude").text) - 0.5] - scopes_buf = [point_upleft_buf, point_upright_buf, point_downleft_buf, point_downright_buf] - return scopes_buf + point_upleft_buf = [float(topLeft.find("longitude").text) - 0.6, float(topLeft.find("latitude").text) + 0.6] + point_upright_buf = [float(topRight.find("longitude").text) + 0.6, float(topRight.find("latitude").text) + 0.6] + point_downleft_buf = [float(bottomLeft.find("longitude").text) - 
0.6, + float(bottomLeft.find("latitude").text) - 0.6] + point_downright_buf = [float(bottomRight.find("longitude").text) + 0.6, + float(bottomRight.find("latitude").text) - 0.6] + scopes_buf = ([point_upleft_buf, point_upright_buf, point_downleft_buf, point_downright_buf], ) + return scopes, scopes_buf class xml_extend: @@ -114,10 +114,10 @@ class xml_extend: point_downright = [float(BottomRightLongitude.text), float(BottomRightLatitude.text)] scopes = [point_upleft, point_upright, point_downleft, point_downright] - point_upleft_buf = [float(TopLeftLongitude.text) - 0.5, float(TopLeftLatitude.text) + 0.5] - point_upright_buf = [float(TopRightLongitude.text) + 0.5, float(TopRightLatitude.text) + 0.5] - point_downleft_buf = [float(BottomLeftLongitude.text) - 0.5, float(BottomLeftLatitude.text) - 0.5] - point_downright_buf = [float(BottomRightLongitude.text) + 0.5, float(BottomRightLatitude.text) - 0.5] + point_upleft_buf = [float(TopLeftLongitude.text) - 0.6, float(TopLeftLatitude.text) + 0.6] + point_upright_buf = [float(TopRightLongitude.text) + 0.6, float(TopRightLatitude.text) + 0.6] + point_downleft_buf = [float(BottomLeftLongitude.text) - 0.6, float(BottomLeftLatitude.text) - 0.6] + point_downright_buf = [float(BottomRightLongitude.text) + 0.6, float(BottomRightLatitude.text) - 0.6] scopes_buf = [point_upleft_buf, point_upright_buf, point_downleft_buf, point_downright_buf] return scopes diff --git a/tool/csv/csvHandle.py b/tool/csv/csvHandle.py index 5ad7793..e34c0c6 100644 --- a/tool/csv/csvHandle.py +++ b/tool/csv/csvHandle.py @@ -129,6 +129,8 @@ class csvHandle: ch_names = [] positions = [] for data in train_data_list: + if data[3] == []: + continue ids.append(data[0]) class_ids.append(data[1]) ch_names.append(data[2])
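
For reference, below is a minimal standalone sketch of the geotransform correction that both `correct_sim_ori` implementations in this patch apply after `calOffset.exe` has written `off.txt`: read the x/y offsets and shift the raster origin east by x and south by y, leaving pixel size and rotation terms untouched. The function and argument names here are hypothetical, and `off.txt` is assumed to hold two lines with offsets in the raster's geographic units (degrees for WGS84), as implied by the diff adding them directly to the geotransform origin; this is an illustration, not part of the patch.

```python
# Illustrative sketch only -- not part of the patch above.
# Reproduces the offset correction from correct_sim_ori with plain GDAL
# instead of the ImageHandler helper used in the repository.
from osgeo import gdal


def shift_geotransform(src_tif, off_txt, dst_tif):
    # off.txt is assumed to contain two lines: x offset, then y offset.
    with open(off_txt, 'r') as f:
        lines = f.readlines()
    x_off = float(lines[0])
    y_off = float(lines[1])

    src = gdal.Open(src_tif, gdal.GA_ReadOnly)
    gt = src.GetGeoTransform()
    # Same arithmetic as the diff: lon_new = gt[0] + x, lat_new = gt[3] - y;
    # pixel sizes and rotation terms are left unchanged.
    new_gt = (gt[0] + x_off, gt[1], gt[2], gt[3] - y_off, gt[4], gt[5])

    # Copy the raster and stamp the corrected geotransform onto the copy.
    dst = gdal.GetDriverByName('GTiff').CreateCopy(dst_tif, src)
    dst.SetGeoTransform(new_gt)
    dst.FlushCache()
    dst = None
    src = None
    return dst_tif
```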