forked from KJ135/microproduct-s-sar-mirror
Modify ortho-NoS1GBM
parent 80b07bc09f
commit b2be696a06
@@ -0,0 +1,17 @@
# Input files
Input/
Input2/
Input3/
Input4/
Input5/
# Output files
Temporary/
# Ignore tool/build files
__pycache__/
dist/
build/
Ortho/
run_log/
*.tiff
*.tif
*.log
@@ -0,0 +1,49 @@
# -*- mode: python ; coding: utf-8 -*-


block_cipher = None


a = Analysis(
    ['OrthoMain.py'],
    pathex=['.'],
    binaries=[],
    datas=[],
    hiddenimports=[],
    hookspath=[],
    runtime_hooks=[],
    excludes=[],
    win_no_prefer_redirects=False,
    win_private_assemblies=False,
    cipher=block_cipher,
    noarchive=False,
)
pyz = PYZ(a.pure, a.zipped_data, cipher=block_cipher)

exe = EXE(
    pyz,
    a.scripts,
    [],
    exclude_binaries=True,
    name='OrthOne',
    debug=False,
    bootloader_ignore_signals=False,
    strip=False,
    upx=True,
    console=True,
    disable_windowed_traceback=False,
    argv_emulation=False,
    target_arch=None,
    codesign_identity=None,
    entitlements_file=None,
)
coll = COLLECT(
    exe,
    a.binaries,
    a.zipfiles,
    a.datas,
    strip=False,
    upx=True,
    upx_exclude=[],
    name='OrthOne',
)
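The spec above freezes OrthoMain.py into a one-folder console build named OrthOne. A minimal sketch of driving that build programmatically, assuming the spec is saved as OrthOne.spec next to OrthoMain.py (running `pyinstaller OrthOne.spec` from the command line is equivalent):

# Hypothetical build driver; the file name OrthOne.spec is an assumption.
import PyInstaller.__main__

PyInstaller.__main__.run([
    "--clean",       # drop any previous build cache first
    "--noconfirm",   # overwrite dist/OrthOne without prompting
    "OrthOne.spec",
])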
@@ -0,0 +1,103 @@
<?xml version='1.0' encoding='utf-8'?>
<Root>
    <TaskID>CSAR_202107275419_0001-0</TaskID>
    <WorkSpace>K:\SSARTest\workspace\</WorkSpace>
    <AlgCompt>
        <DataTransModel>File</DataTransModel>
        <Artificial>ElementAlg</Artificial>
        <AlgorithmName>Ortho_S_SAR_V2.2</AlgorithmName>
        <DllName>Ortho_S_SAR_V2.2.exe</DllName>
        <ChsName>正射校正</ChsName>
        <AlgorithmDesc>微波卫星3-5级产品生产模型</AlgorithmDesc>
        <AlgorithmAlias>Ortho-S-SAR-V2.2-1</AlgorithmAlias>
        <Version>1.0</Version>
        <AlgorithmClass>辐射类产品_正射校正</AlgorithmClass>
        <AlgorithmLevel>4</AlgorithmLevel>
        <AlgoirthmID>Ortho_中科卫星应用德清研究院_2.2</AlgoirthmID>
        <Author>中科卫星应用德清研究院</Author>
        <Type>景-算法</Type>
        <InputTestFilePath>Ortho\\Input6</InputTestFilePath>
        <InputTestFileName>
            2599253_San_Francisco
        </InputTestFileName>
        <OutputTestFilePath>Ortho\\Output</OutputTestFilePath>
        <OutputTestFileName>
        </OutputTestFileName>
        <jdkVersion>1.8</jdkVersion>
        <algDevlanguage>python</algDevlanguage>
        <Environment>
            <IsCluster>0</IsCluster>
            <ClusterNum>0</ClusterNum>
            <OperatingSystem>Windows10</OperatingSystem>
            <CPU>4核</CPU>
            <Memory>8GB</Memory>
            <Storage>25GB</Storage>
            <NetworkCard>无需求</NetworkCard>
            <Bandwidth>无需求</Bandwidth>
            <GPU>无需求</GPU>
        </Environment>
        <Utility Satellite="GF3" Sensor="MSS" Resolution="1" />
        <Inputs ParameterNum="3">
            <Parameter>
                <ParaName>SLC</ParaName>
                <ParaChsName>SLC元文件</ParaChsName>
                <Description>原始SLC各相关文件和参数</Description>
                <ParaType>File</ParaType>
                <DataType>tar.gz</DataType>
                <ParaSource>Cal</ParaSource>
                <ParaValue>K:\SSARTest\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458.tar.gz</ParaValue>
                <EnModification>True</EnModification>
                <EnMultipleChoice>False</EnMultipleChoice>
                <Control>File</Control>
                <InputType>Satellite</InputType>
                <InputNum>1</InputNum>
                <DateFrom>GF3A</DateFrom>
            </Parameter>
            <Parameter>
                <ParaName>DEM</ParaName>
                <ParaChsName>DEM数字高程影像</ParaChsName>
                <Description>30m分辨率DEM数字高程影像zip</Description>
                <ParaType>File</ParaType>
                <DataType>zip</DataType>
                <ParaSource>Cal</ParaSource>
                <ParaValue>K:\SSARTest\115E33N_COP30.zip</ParaValue>
                <EnModification>True</EnModification>
                <EnMultipleChoice>True</EnMultipleChoice>
                <Control>File</Control>
                <InputType>DEM</InputType>
                <InputNum>0</InputNum>
                <DateFrom>DEM</DateFrom>
            </Parameter>
            <Parameter>
                <ParaName>CorrectMethod</ParaName>
                <ParaChsName>选择校正方法</ParaChsName>
                <Description>1.RPC;2.RD</Description>
                <ParaType>int</ParaType>
                <DataType>int</DataType>
                <ParaSource>Cal</ParaSource>
                <ParaValue>2</ParaValue>
                <EnModification>True</EnModification>
                <EnMultipleChoice>True</EnMultipleChoice>
                <Control>UploadInput</Control>
                <InputType>Aux</InputType>
                <InputNum>0</InputNum>
                <DateFrom>Aux</DateFrom>
            </Parameter>
        </Inputs>
        <Outputs ParameterNum="1">
            <Parameter>
                <ParaName>OrthoProduct</ParaName>
                <ParaChsName>产品结果文件</ParaChsName>
                <Description>产品结果文件</Description>
                <ParaType>File</ParaType>
                <DataType>tar.gz</DataType>
                <ParaSource>Cal</ParaSource>
                <ParaValue>K:\SSARTest\workspace\Ortho\Output\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458-Ortho.tar.gz</ParaValue>
                <MaxValue>DEFAULT</MaxValue>
                <MinValue>DEFAULT</MinValue>
                <OptionValue>DEFAULT</OptionValue>
                <NoDataValue>DEFAULT</NoDataValue>
            </Parameter>
        </Outputs>
    </AlgCompt>
</Root>
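This task XML is the single argument the orchestration framework passes to the packaged executable; the Inputs/Outputs blocks hold one Parameter element per product. A minimal, stand-alone sketch of listing the ParaName/ParaValue pairs with the standard library (the project's own ManageAlgXML class does considerably more - validation, workspace handling, writing back OrthoProduct):

from xml.etree import ElementTree as ET

def list_parameters(task_xml_path):
    # Walk AlgCompt/Inputs and AlgCompt/Outputs and collect ParaName -> ParaValue.
    alg = ET.parse(task_xml_path).getroot().find("AlgCompt")
    params = {}
    for group in ("Inputs", "Outputs"):
        for para in alg.find(group).findall("Parameter"):
            params[(group, para.find("ParaName").text)] = para.find("ParaValue").text
    return params

# e.g. {('Inputs', 'SLC'): 'K:\\SSARTest\\HJ2E_...tar.gz', ('Inputs', 'CorrectMethod'): '2', ...}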
@@ -0,0 +1,103 @@
<?xml version='1.0' encoding='utf-8'?>
<Root>
    <TaskID>CSAR_202107275419_0001-0</TaskID>
    <WorkSpace>D:\micro\SWork\</WorkSpace>
    <AlgCompt>
        <DataTransModel>File</DataTransModel>
        <Artificial>ElementAlg</Artificial>
        <AlgorithmName>Ortho_S_SAR_V2.2</AlgorithmName>
        <DllName>Ortho_S_SAR_V2.2.exe</DllName>
        <ChsName>正射校正</ChsName>
        <AlgorithmDesc>微波卫星3-5级产品生产模型</AlgorithmDesc>
        <AlgorithmAlias>Ortho-S-SAR-V2.2-1</AlgorithmAlias>
        <Version>1.0</Version>
        <AlgorithmClass>辐射类产品_正射校正</AlgorithmClass>
        <AlgorithmLevel>4</AlgorithmLevel>
        <AlgoirthmID>Ortho_中科卫星应用德清研究院_2.2</AlgoirthmID>
        <Author>中科卫星应用德清研究院</Author>
        <Type>景-算法</Type>
        <InputTestFilePath>Ortho\\Input6</InputTestFilePath>
        <InputTestFileName>
            2599253_San_Francisco
        </InputTestFileName>
        <OutputTestFilePath>Ortho\\Output</OutputTestFilePath>
        <OutputTestFileName>
        </OutputTestFileName>
        <jdkVersion>1.8</jdkVersion>
        <algDevlanguage>python</algDevlanguage>
        <Environment>
            <IsCluster>0</IsCluster>
            <ClusterNum>0</ClusterNum>
            <OperatingSystem>Windows10</OperatingSystem>
            <CPU>4核</CPU>
            <Memory>8GB</Memory>
            <Storage>25GB</Storage>
            <NetworkCard>无需求</NetworkCard>
            <Bandwidth>无需求</Bandwidth>
            <GPU>无需求</GPU>
        </Environment>
        <Utility Satellite="GF3" Sensor="MSS" Resolution="1" />
        <Inputs ParameterNum="3">
            <Parameter>
                <ParaName>SLC</ParaName>
                <ParaChsName>SLC元文件</ParaChsName>
                <Description>原始SLC各相关文件和参数</Description>
                <ParaType>File</ParaType>
                <DataType>tar.gz</DataType>
                <ParaSource>Cal</ParaSource>
                <ParaValue>F:\MicroWorkspace\S_SAR\AHV\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458.tar.gz</ParaValue>
                <EnModification>True</EnModification>
                <EnMultipleChoice>False</EnMultipleChoice>
                <Control>File</Control>
                <InputType>Satellite</InputType>
                <InputNum>1</InputNum>
                <DateFrom>GF3A</DateFrom>
            </Parameter>
            <Parameter>
                <ParaName>DEM</ParaName>
                <ParaChsName>DEM数字高程影像</ParaChsName>
                <Description>30m分辨率DEM数字高程影像tif</Description>
                <ParaType>File</ParaType>
                <DataType>zip</DataType>
                <ParaSource>Cal</ParaSource>
                <ParaValue>F:\al_zhongji\S-SAR-data\backScattering\DEM\115E33N_COP30.zip</ParaValue>
                <EnModification>True</EnModification>
                <EnMultipleChoice>True</EnMultipleChoice>
                <Control>File</Control>
                <InputType>DEM</InputType>
                <InputNum>0</InputNum>
                <DateFrom>DEM</DateFrom>
            </Parameter>
            <Parameter>
                <ParaName>CorrectMethod</ParaName>
                <ParaChsName>选择校正方法</ParaChsName>
                <Description>1.RPC;2.RD</Description>
                <ParaType>int</ParaType>
                <DataType>int</DataType>
                <ParaSource>Cal</ParaSource>
                <ParaValue>2</ParaValue>
                <EnModification>True</EnModification>
                <EnMultipleChoice>True</EnMultipleChoice>
                <Control>UploadInput</Control>
                <InputType>Aux</InputType>
                <InputNum>0</InputNum>
                <DateFrom>Aux</DateFrom>
            </Parameter>
        </Inputs>
        <Outputs ParameterNum="1">
            <Parameter>
                <ParaName>OrthoProduct</ParaName>
                <ParaChsName>产品结果文件</ParaChsName>
                <Description>产品结果文件</Description>
                <ParaType>File</ParaType>
                <DataType>tar.gz</DataType>
                <ParaSource>Cal</ParaSource>
                <ParaValue>D:\micro\SWork\Ortho\Output\HJ2E_MYC_QPS_001752_E118.0_N37.7_20230204_SLC_AHV_L10000010458-Ortho.tar.gz</ParaValue>
                <MaxValue>DEFAULT</MaxValue>
                <MinValue>DEFAULT</MinValue>
                <OptionValue>DEFAULT</OptionValue>
                <NoDataValue>DEFAULT</NoDataValue>
            </Parameter>
        </Outputs>
    </AlgCompt>
</Root>
File diff suppressed because it is too large
@@ -0,0 +1,415 @@
# Auxiliary data handling for 1 m orthorectification
import time
import math
import numpy as np
from osgeo import gdal
from xml.etree.ElementTree import ElementTree
from scipy.optimize import leastsq


class OrthoAuxData:
    def __init__(self):
        pass

    @staticmethod
    def time_stamp(tm):
        # Round the seconds up and convert "Y-m-d H:M:S[.f]" to a POSIX timestamp
        parts = tm.split(':')
        sec = math.ceil(float(parts[2]))
        tm1 = parts[0] + ':' + parts[1] + ':' + str(sec)
        tmArr = time.strptime(tm1, "%Y-%m-%d %H:%M:%S")
        # tmArr = time.strptime(tm1, "%Y-%m-%d %H:%M:%S.%f")
        ts = float(time.mktime(tmArr))  # convert to a timestamp
        return ts
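time_stamp expects timestamps shaped like the <TimeStamp> values in the *.meta.xml ('YYYY-MM-DD HH:MM:SS.ffffff'); the fractional seconds are rounded up before conversion to a local-time POSIX timestamp. A small illustration (the input string is made up):

# Illustrative value only; real inputs come from the GPS nodes of the metadata.
ts = OrthoAuxData.time_stamp("2023-02-04 03:15:27.358432")
# equivalent to time.mktime(time.strptime("2023-02-04 03:15:28", "%Y-%m-%d %H:%M:%S"))
print(ts)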

    @staticmethod
    def read_meta(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        T = []
        Xs = []
        Ys = []
        Zs = []
        Vsx = []
        Vsy = []
        Vsz = []
        GPS_data = root.find('GPS')
        for child in GPS_data:
            Xs.append(float(child.find('xPosition').text))
            Ys.append(float(child.find('yPosition').text))
            Zs.append(float(child.find('zPosition').text))
            Vsx.append(float(child.find('xVelocity').text))
            Vsy.append(float(child.find('yVelocity').text))
            Vsz.append(float(child.find('zVelocity').text))
            tm = child.find('TimeStamp').text
            ts = OrthoAuxData.time_stamp(tm)
            T.append(ts)
        meta_data = [Xs, Ys, Zs, Vsx, Vsy, Vsz]
        return T, meta_data

    @staticmethod
    def read_control_points(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        imageinfo = root.find('imageinfo')
        center = imageinfo.find('center')
        corner = imageinfo.find('corner')
        ctrl_pts = [[] for i in range(2)]
        ctrl_pts[0].append(float(center.find('longitude').text))
        ctrl_pts[1].append(float(center.find('latitude').text))

        for child in corner:
            ctrl_pts[0].append(float(child.find('longitude').text))
            ctrl_pts[1].append(float(child.find('latitude').text))
        return ctrl_pts

    @staticmethod
    def read_dem(dem_resampled_path, flag=1):
        in_ds = gdal.Open(dem_resampled_path)
        gt = list(in_ds.GetGeoTransform())
        bands_num = in_ds.RasterCount
        x_size = in_ds.RasterXSize
        y_size = in_ds.RasterYSize
        pstn_arr = np.zeros([y_size, x_size, 3], dtype=np.float64)
        for i in range(1, bands_num + 1):
            data = in_ds.GetRasterBand(i).ReadAsArray(0, 0, x_size, y_size)
            for y in range(y_size):
                for x in range(x_size):
                    longitude = gt[0] + x * gt[1]
                    latitude = gt[3] + y * gt[5]
                    altitude = data[y, x]
                    if flag == 1:
                        pstn = OrthoAuxData.LLA2XYZ(longitude, latitude, altitude)
                    else:
                        pstn = [longitude, latitude, altitude]
                    pstn_arr[y, x, 0] = pstn[0]
                    pstn_arr[y, x, 1] = pstn[1]
                    pstn_arr[y, x, 2] = pstn[2]

        del in_ds, data
        return pstn_arr

    @staticmethod
    def read_demM(dem_resampled_path, part_cnt, r_cnt, c_cnt, flag=1):
        in_ds = gdal.Open(dem_resampled_path)
        gt = list(in_ds.GetGeoTransform())
        bands_num = in_ds.RasterCount
        x_size = in_ds.RasterXSize // part_cnt
        y_size = in_ds.RasterYSize // part_cnt
        x = [[i] * y_size for i in range(x_size)]
        y = [[i] * x_size for i in range(y_size)]
        x = np.array(x)
        x = x.T
        y = np.array(y)
        x_off = c_cnt * x_size
        y_off = r_cnt * y_size
        gt[0] = gt[0] + c_cnt * x_size * gt[1]
        gt[3] = gt[3] + r_cnt * y_size * gt[5]
        for i in range(1, bands_num + 1):
            data = in_ds.GetRasterBand(i).ReadAsArray(x_off, y_off, x_size, y_size)
            altitude = data / 255 * 1024
            longitude = gt[0] + x * gt[1]
            latitude = gt[3] + y * gt[5]
            if flag == 1:
                pstn = OrthoAuxData.LLA2XYZM(longitude, latitude, altitude)
            else:
                pstn = [longitude, latitude, altitude]

        del in_ds, data
        return pstn

    @staticmethod
    def read_dem_row(dem_resampled_path, p, flag=1):
        in_ds = gdal.Open(dem_resampled_path)
        gt = list(in_ds.GetGeoTransform())
        bands_num = in_ds.RasterCount
        x_size = in_ds.RasterXSize
        y_size = in_ds.RasterYSize

        x = [[i] for i in range(x_size)]
        x = np.array(x)
        x = x.T
        y = np.ones((1, x_size)) * p
        x_off = 0
        y_off = p
        for i in range(1, bands_num + 1):
            data = in_ds.GetRasterBand(i).ReadAsArray(x_off, y_off, x_size, 1)
            altitude = data
            longitude = gt[0] + x * gt[1]
            latitude = gt[3] + y * gt[5]
            if flag == 1:
                pstn = OrthoAuxData.LLA2XYZM(longitude, latitude, altitude)
            else:
                pstn = [longitude, latitude, altitude]

        del in_ds, data
        return pstn

    @staticmethod
    def orbit_fitting(time_array, meta_data):
        # Fit the orbit state vectors by least squares
        T0 = (time_array[0] + time_array[len(time_array) - 1]) / 2
        t = []
        for i in range(len(time_array)):
            t.append(time_array[i] - T0)

        def func(p, x):
            w3, w2, w1, w0 = p
            return w3 * x ** 3 + w2 * x ** 2 + w1 * x + w0

        def error(p, x, y):
            return func(p, x) - y

        orbital_paras = []
        for j in range(len(meta_data)):
            p0 = [1, 2, 3, 4]
            x = np.array(t)
            y = np.array(meta_data[j])
            Para = leastsq(error, p0, args=(x, y))
            orbital_paras.append(Para[0])
            print(Para[0], Para[1])

        return orbital_paras, T0
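orbit_fitting models each orbit state-vector component as an independent cubic polynomial in time, centred on the mid-epoch of the GPS samples, with the coefficients estimated by scipy.optimize.leastsq. In LaTeX form (w_3..w_0 are the entries of each returned parameter vector):

    T_0 = \tfrac{1}{2}\,(t_{\mathrm{first}} + t_{\mathrm{last}}), \qquad
    X(t) \approx w_3 (t - T_0)^3 + w_2 (t - T_0)^2 + w_1 (t - T_0) + w_0

leastsq minimises \sum_i \left[X(t_i) - X_i\right]^2 separately for each of Xs, Ys, Zs, Vsx, Vsy and Vsz; T_0 is returned alongside the coefficients so callers evaluate the polynomials with the same time offset.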

    @staticmethod
    def get_PRF(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        sensor = root.find('sensor')
        waveParams = sensor.find('waveParams')
        PRF = float(waveParams.find('wave').find('prf').text)
        return PRF

    @staticmethod
    def get_delta_R(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        sensor = root.find('sensor')
        pulseWidth = float(sensor.find('waveParams').find('wave').find('pulseWidth').text)
        bandWidth = float(sensor.find('waveParams').find('wave').find('bandWidth').text)
        c = 299792458
        delta_R = c / (1000000 * 2 * bandWidth)
        return delta_R
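get_delta_R turns the chirp bandwidth read from the metadata (stored in MHz, hence the 10^6 factor) into the slant-range sample spacing; pulseWidth is read but not used in the calculation:

    \Delta R = \frac{c}{2B}, \qquad c = 299\,792\,458\ \mathrm{m/s}, \quad B = \mathrm{bandWidth} \times 10^{6}\ \mathrm{Hz}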

    @staticmethod
    def get_doppler_rate_coef(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        processinfo = root.find('processinfo')
        doppler = processinfo.find('DopplerRateValuesCoefficients')
        t0 = float(processinfo.find('DopplerParametersReferenceTime').text)
        r0 = float(doppler.find('r0').text)
        r1 = float(doppler.find('r1').text)
        r2 = float(doppler.find('r2').text)
        r3 = float(doppler.find('r3').text)
        r4 = float(doppler.find('r4').text)

        return t0, np.array([r0, r1, r2, r3, r4]).reshape(5, 1)

    @staticmethod
    def get_doppler_center_coef(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        processinfo = root.find('processinfo')
        doppler = processinfo.find('DopplerCentroidCoefficients')
        b0 = float(doppler.find('d0').text)
        b1 = float(doppler.find('d1').text)
        b2 = float(doppler.find('d2').text)
        return b0, b1, b2

    @staticmethod
    def get_lamda(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        sensor = root.find('sensor')
        lamda = float(sensor.find('lamda').text)
        return lamda

    @staticmethod
    def get_t0(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        imageinfo = root.find('imageinfo')
        tm = imageinfo.find('imagingTime').find('start').text
        t0 = OrthoAuxData.time_stamp(tm)
        return t0

    @staticmethod
    def get_start_and_end_time(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        imageinfo = root.find('imageinfo')
        tm0 = imageinfo.find('imagingTime').find('start').text
        tm1 = imageinfo.find('imagingTime').find('end').text
        starttime = OrthoAuxData.time_stamp(tm0)
        endtime = OrthoAuxData.time_stamp(tm1)
        return starttime, endtime

    @staticmethod
    def get_width_and_height(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        imageinfo = root.find('imageinfo')
        width = int(imageinfo.find('width').text)
        height = int(imageinfo.find('height').text)
        return width, height

    @staticmethod
    def get_R0(meta_file_path):
        tree = ElementTree()
        tree.parse(meta_file_path)
        root = tree.getroot()
        imageinfo = root.find('imageinfo')
        R0 = float(imageinfo.find('nearRange').text)
        return R0

    @staticmethod
    def get_h():
        h = 6.6
        return h

    @staticmethod
    def LLA2XYZ(longitude, latitude, altitude):
        '''
        WGS-84: convert geodetic longitude/latitude/altitude to ECEF coordinates
        '''
        # Sines and cosines of latitude and longitude
        cosLat = math.cos(latitude * math.pi / 180)
        sinLat = math.sin(latitude * math.pi / 180)
        cosLon = math.cos(longitude * math.pi / 180)
        sinLon = math.sin(longitude * math.pi / 180)
        # WGS-84 ellipsoid parameters
        rad = 6378137.0  # mean equatorial radius of the Earth
        f = 1.0 / 298.257224  # WGS-84 ellipsoid flattening
        C = 1.0 / math.sqrt(cosLat * cosLat + (1 - f) * (1 - f) * sinLat * sinLat)
        S = (1 - f) * (1 - f) * C
        h = altitude
        # Compute the XYZ coordinates
        X = (rad * C + h) * cosLat * cosLon
        Y = (rad * C + h) * cosLat * sinLon
        Z = (rad * S + h) * sinLat
        # return np.array([X, Y, Z]).reshape(1, 3)
        return [X, Y, Z]
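LLA2XYZ is the standard geodetic-to-ECEF conversion on the WGS-84 ellipsoid, with the semi-major axis a = 6378137 m and flattening f = 1/298.257224 hard-coded above:

    C = \frac{1}{\sqrt{\cos^2\varphi + (1-f)^2 \sin^2\varphi}}, \qquad S = (1-f)^2 C

    X = (aC + h)\cos\varphi\cos\lambda, \quad Y = (aC + h)\cos\varphi\sin\lambda, \quad Z = (aS + h)\sin\varphi

where \varphi is latitude, \lambda is longitude and h the ellipsoidal height in metres. LLA2XYZM below is the same formula vectorised over NumPy arrays so whole DEM blocks can be converted at once.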

    @staticmethod
    def LLA2XYZM(longitude, latitude, altitude):
        # Sines and cosines of latitude and longitude
        cosLat = np.cos(latitude * math.pi / 180).reshape(-1, 1)
        sinLat = np.sin(latitude * math.pi / 180).reshape(-1, 1)
        cosLon = np.cos(longitude * math.pi / 180).reshape(-1, 1)
        sinLon = np.sin(longitude * math.pi / 180).reshape(-1, 1)
        # WGS-84 ellipsoid parameters
        rad = 6378137.0  # mean equatorial radius of the Earth
        f = 1.0 / 298.257224  # WGS-84 ellipsoid flattening
        C = 1.0 / (np.sqrt(cosLat * cosLat + (1 - f) * (1 - f) * sinLat * sinLat)).reshape(-1, 1)
        S = (1 - f) * (1 - f) * C
        h = altitude.reshape(-1, 1)
        # Compute the XYZ coordinates
        X = (rad * C + h) * cosLat * cosLon
        Y = (rad * C + h) * cosLat * sinLon
        Z = (rad * S + h) * sinLat
        return [X, Y, Z]

    @staticmethod
    def XYZ2LLA(X, Y, Z):
        ''' Convert ECEF coordinates to geodetic longitude/latitude/altitude
        (WGS-84 ellipsoid)
        args:
            x, y, z
        return:
            lat, long, altitude
        '''
        # WGS-84 ellipsoid parameters
        a = 6378137.0  # semi-major axis
        b = 6356752.314245  # semi-minor axis
        ea = np.sqrt((a ** 2 - b ** 2) / a ** 2)
        eb = np.sqrt((a ** 2 - b ** 2) / b ** 2)
        p = np.sqrt(X ** 2 + Y ** 2)
        theta = np.arctan2(Z * a, p * b)

        # Longitude, latitude and altitude
        longitude = np.arctan2(Y, X)
        latitude = np.arctan2(Z + eb ** 2 * b * np.sin(theta) ** 3, p - ea ** 2 * a * np.cos(theta) ** 3)
        N = a / np.sqrt(1 - ea ** 2 * np.sin(latitude) ** 2)
        altitude = p / np.cos(latitude) - N

        # return np.array([np.degrees(latitude), np.degrees(longitude), altitude])
        return [np.degrees(longitude), np.degrees(latitude), altitude]

    @staticmethod
    def XYZ2LLAM(X, Y, Z):
        ''' Convert ECEF coordinates to geodetic longitude/latitude/altitude
        (WGS-84 ellipsoid)
        args:
            x, y, z
        return:
            lat, long, altitude
        '''
        # WGS-84 ellipsoid parameters
        a = 6378137.0  # semi-major axis
        b = 6356752.314245  # semi-minor axis
        ea = np.sqrt((a ** 2 - b ** 2) / a ** 2)
        eb = np.sqrt((a ** 2 - b ** 2) / b ** 2)
        p = np.sqrt(X ** 2 + Y ** 2)
        theta = np.arctan2(Z * a, p * b)

        # Longitude, latitude and altitude
        longitude = np.arctan2(Y, X)
        latitude = np.arctan2(Z + eb ** 2 * b * np.sin(theta) ** 3, p - ea ** 2 * a * np.cos(theta) ** 3)
        N = a / np.sqrt(1 - ea ** 2 * np.sin(latitude) ** 2)
        altitude = p / np.cos(latitude) - N

        # return np.array([np.degrees(latitude), np.degrees(longitude), altitude])
        return [np.degrees(longitude), np.degrees(latitude), altitude]
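XYZ2LLA and its identical vectorised twin XYZ2LLAM invert the conversion with the closed-form Bowring approximation rather than iterating:

    p = \sqrt{X^2 + Y^2}, \qquad \theta = \arctan\frac{Z a}{p b}

    \lambda = \operatorname{atan2}(Y, X), \qquad
    \varphi = \arctan\frac{Z + e_b^2\, b \sin^3\theta}{p - e_a^2\, a \cos^3\theta}, \qquad
    h = \frac{p}{\cos\varphi} - N(\varphi)

with e_a, e_b the first and second eccentricities and N(\varphi) = a / \sqrt{1 - e_a^2 \sin^2\varphi} the prime-vertical radius of curvature; longitude and latitude are returned in degrees.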

    @staticmethod
    def world2Pixel(geoMatrix, x, y):
        """
        Use a GDAL geotransform (gdal.Dataset.GetGeoTransform()) to compute the
        pixel/line position of a geographic coordinate
        """
        ulx = geoMatrix[0]
        uly = geoMatrix[3]
        xDist = geoMatrix[1]
        yDist = geoMatrix[5]
        rtnX = geoMatrix[2]
        rtnY = geoMatrix[4]
        pixel = int((x - ulx) / xDist)
        line = int((uly - y) / abs(yDist))
        return pixel, line
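A minimal usage sketch for world2Pixel, assuming a north-up raster so the rotation terms rtnX/rtnY (read but unused above) are zero; the path and coordinates are placeholders only:

from osgeo import gdal

ds = gdal.Open("dem.tif")                              # placeholder path
gt = ds.GetGeoTransform()                              # (ulx, xres, 0, uly, 0, -yres) for north-up rasters
col, row = OrthoAuxData.world2Pixel(gt, 115.5, 33.2)   # x/y given in the raster's CRS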

    @staticmethod
    def sar_intensity_synthesis(in_sar_tif, out_sar_tif):
        # Read the SLC-format SAR image
        in_ds = gdal.Open(in_sar_tif)
        bands_num = in_ds.RasterCount
        rows = in_ds.RasterYSize
        columns = in_ds.RasterXSize
        proj = in_ds.GetProjection()
        geotrans = in_ds.GetGeoTransform()

        # Create the output SAR intensity image
        gtiff_driver = gdal.GetDriverByName('GTiff')
        out_ds = gtiff_driver.Create(out_sar_tif, columns, rows, 1)
        out_ds.SetProjection(proj)
        out_ds.SetGeoTransform(geotrans)

        # Write the SAR intensity image
        in_data1 = in_ds.GetRasterBand(1).ReadAsArray(0, 0, columns, rows)
        in_data1 = in_data1 / 10
        in_data1 = np.power(10, in_data1)
        in_data2 = in_ds.GetRasterBand(2).ReadAsArray(0, 0, columns, rows)
        in_data2 = in_data2 / 10
        in_data2 = np.power(10, in_data2)
        out_data = np.sqrt(in_data1 ** 2 + in_data2 ** 2)
        out_ds.GetRasterBand(1).WriteArray(out_data)

        del in_ds, out_ds
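sar_intensity_synthesis treats the two input bands as dB values, converts them to linear scale and writes their magnitude as a single-band intensity image:

    I_k = 10^{\,d_k / 10}, \qquad I_{\mathrm{out}} = \sqrt{I_1^2 + I_2^2}

where d_k is the dB value of band k at each pixel.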
@@ -0,0 +1,839 @@
# -*- coding: UTF-8 -*-
"""
@Project :microproduct
@File :OneOrthoMain.py
@Function :Orthorectification
@Author :KHZ
@Contact:
@Date :2021/8/14
@Version :1.0.0
"""
import logging

from tool.algorithm.block.blockprocess import BlockProcess
from tool.algorithm.image.ImageHandle import ImageHandler
from tool.algorithm.xml.AnalysisXml import xml_extend, DictXml
from tool.algorithm.xml.CreateMetaDict import CreateMetaDict, CreateProductXml
from tool.algorithm.algtools.PreProcess import PreProcess as pp
import tarfile
from tool.algorithm.xml.AlgXmlHandle import ManageAlgXML, CheckSource  # xml reading and input checking
from tool.config.ConfigeHandle import Config as cf
from OrthoAlg import IndirectOrthorectification, DEMProcess, rpc_correction, getRCImageRC, get_RPC_lon_lat, getRCImageRC2
from OrthoAlg import ScatteringAlg as alg
from tool.algorithm.algtools.logHandler import LogHandler
from tool.algorithm.xml.CreatMetafile import CreateMetafile
from OrthoXmlInfo import CreateDict, CreateStadardXmlFile
from osgeo import gdal, osr
import os
import glob
# import gc
import datetime
import shutil
import sys
import scipy  # imported so PyInstaller bundles scipy
import scipy.spatial.transform  # needed for the packaged build
import scipy.spatial.transform.rotation
import scipy.spatial.transform._rotation_groups  # needed for the packaged build
import configparser

config = configparser.ConfigParser()
config.read('config.ini', encoding='utf-8')

DEBUG = config['config']['debug'] == 'True'  # configparser returns strings; compare explicitly to get a real bool
EXE_NAME = cf.get('exe_name')
productLevel = cf.get('productLevel')
tar = cf.get('target')
# env_str = os.getcwd()
env_str = os.path.dirname(os.path.abspath(sys.argv[0]))  # os.path.split(os.path.realpath(__file__))[0]
os.environ['PROJ_LIB'] = env_str
LogHandler.init_log_handler(os.path.join("run_log", EXE_NAME))  # r"run_log\Ortho"
logger = logging.getLogger("mylog")
logger.info(env_str)
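Runtime switches come from a config.ini next to the executable. configparser always hands values back as strings, which is why the debug flag above is compared against 'True' rather than used as a boolean directly. A self-contained illustration (only the [config] section and debug key are taken from the code above; the real file may hold more options):

import configparser

cfg = configparser.ConfigParser()
cfg.read_string("[config]\ndebug = True\n")
raw = cfg['config']['debug']
print(type(raw), raw)                      # <class 'str'> True
print(raw is True)                         # False - a string is never the True singleton
print(cfg.getboolean('config', 'debug'))   # True - configparser's own conversion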


class LogHandler2:
    """Simple logging helper used to report program status.

    Because this is a single stand-alone program there is no real need for the
    logging module here.
    Logging strategy:
        1. the outermost caller wraps execution in try/except
        2. the last line records whether execution finished or failed
    Log format:
        first line:
            TaskID, time, path of the input parameter file
        middle lines:
            time, state, execution step, message

        last line:
            finished - the program completed
            failed   - the program hit an error

    """
    def __init__(self, sLogPath) -> None:
        '''
        Initialise the log file
        args:
            sLogPath:str  path of the log file
        raise:
            IOError:Exception
        '''
        self.__sLogPath = sLogPath

    def loggingStart(self, sTaskID, sParamPath):
        '''Write the log header:
        TaskID, time, path of the input parameter file.
        args:
            sTaskID:str  task ID
            sParamPath:str  path of the task parameter file
        return:
            None
        '''
        sDateTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")  # execution time
        sOutput = "[{}],[{}],[{}]".format(sTaskID, sDateTime, sParamPath)
        self.__outputText(sOutput)
        pass


    def logging(self, sStates, sExecuteStep, sException):
        """Write an intermediate progress record"""
        sDateTime = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")  # execution time
        sOutput = "[{}],[{}],[{}],[{}]".format(sDateTime, sStates, sExecuteStep, sException)
        self.__outputText(sOutput)
        pass

    def __outputText(self, sMessage):
        '''Append a message to the log file
        '''
        with open(self.__sLogPath, 'a', encoding='utf-8') as fp:
            fp.write("{}".format(sMessage))

    def logggingEnd(self, bSuccessful):
        '''
        Write the last line, recording whether the run succeeded
        '''
        if bSuccessful:
            sEndText = "\n{}\nfinished".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"))
        else:
            sEndText = "\n{}\nError".format(datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"))
        self.__outputText(sEndText)


class OrthoMain:
    """
    Orthorectification by the indirect location method - main class
    """
    def __init__(self, alg_xml_path):
        self.alg_xml_path = alg_xml_path
        self.imageHandler = ImageHandler()
        self.__alg_xml_handler = ManageAlgXML(alg_xml_path)
        self.__check_handler = CheckSource(self.__alg_xml_handler)
        self.__workspace_path = None
        self.__task_id = None
        self.__input_paras = {}
        # self.__output_paras = {}
        self.__in_processing_paras = {}
        # self.__out_processing_paras = {}
        self.__preprocessed_paras = {}
        self.__out_para = None

    def check_source(self):
        """
        Check that the algorithm configuration files, images and auxiliary
        files are all present
        """
        if self.__check_handler.check_alg_xml() is False:
            return False

        if self.__check_handler.check_run_env() is False:
            return False

        input_para_names = ["SLC", "DEM", "CorrectMethod"]  # //todo also check the correction method
        if self.__check_handler.check_input_paras(input_para_names) is False:
            return False

        self.__workspace_path = self.__alg_xml_handler.get_workspace_path()
        self.__task_id = self.__alg_xml_handler.get_task_id()
        self.__input_paras = self.__alg_xml_handler.get_input_paras()
        # self.__output_paras = self.__alg_xml_handler.get_output_paras()
        self.__create_work_space()
        self.__in_processing_paras = self.__init_processing_paras(self.__input_paras)  # input {paraname: paravalue}
        # self.__out_processing_paras = self.__init_processing_paras(self.__output_paras)  # output {paraname: paravalue}

        self.__out_name = os.path.splitext(os.path.splitext(os.path.basename(self.__input_paras['SLC']['ParaValue']))[0])[0]
        # AlgorithmName = self.__alg_xml_handler.get_algorithm_name()
        # TaskId = self.__alg_xml_handler.get_task_id()
        result_name = self.__out_name + ".tar.gz"
        self.__out_para = os.path.join(self.__workspace_path, EXE_NAME, 'Output', result_name)

        isError, CorrectMethod = self.__check_handler.check_input_paras(['CorrectMethod'])  # //todo read the correction method from the xml and branch on it

        if CorrectMethod.get('CorrectMethod') == '1' or CorrectMethod.get('CorrectMethod') == 1:
            logger.info("CorrectMethod is RPC!")
            # self.__out_para = self.__out_para.replace(".tar.gz", "_RPC.tar.gz")
            self.__out_para = self.__out_para.replace(".tar.gz", "-" + tar + ".tar.gz")

        elif CorrectMethod.get('CorrectMethod') == '2' or CorrectMethod.get('CorrectMethod') == 2:
            logger.info("CorrectMethod is RD!")
            # self.__out_para = self.__out_para.replace(".tar.gz", "_RD.tar.gz")
            self.__out_para = self.__out_para.replace(".tar.gz", "-" + tar + ".tar.gz")
        else:
            raise Exception('No CorrectMethod')

        self.__alg_xml_handler.write_out_para("OrthoProduct", self.__out_para)  # write the output parameter back into the task xml

        logger.info('check_source finished!')
        logger.info('progress bar :5%')
        return True

    def __create_work_space(self):
        """
        Delete the old workspace folders and create new ones
        """
        self.__workspace_Output_path = os.path.join(self.__workspace_path, EXE_NAME, "Output")
        self.__workspace_Temporary_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary")
        self.__workspace_unpack_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", "unpack")
        self.__workspace_ResampledDEM_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'TestDEM')
        self.__workspace_baseMap_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'baseMap')
        self.__workspace_LutImg_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'TestLut')
        self.__workspace_IncidenceImg_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'TestInc')
        self.__workspace_SimImg_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'TestSim')
        self.__workspace_SARIntensity_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'TestSAR')
        self.__workspace_package_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", 'package')
        self.__workspace_origin_path = os.path.join(self.__workspace_path, EXE_NAME, "Temporary", "origin")

        path_list = [self.__workspace_Output_path, self.__workspace_Temporary_path,
                     self.__workspace_unpack_path, self.__workspace_ResampledDEM_path,
                     self.__workspace_LutImg_path, self.__workspace_IncidenceImg_path,
                     self.__workspace_SimImg_path, self.__workspace_SARIntensity_path,
                     self.__workspace_package_path, self.__workspace_origin_path,
                     self.__workspace_baseMap_path]

        for path in path_list:
            if os.path.exists(path):
                if DEBUG is True:
                    continue
                self.del_floder(path)
                os.makedirs(path)
            else:
                os.makedirs(path)
        logger.info('create new workspace success!')

    @staticmethod
    def force_del_file(file_path):
        """
        Forcibly delete a file or directory tree
        """
        if os.path.isdir(file_path):
            for main_dir, subdir, file_name_list in os.walk(file_path):
                for filename in file_name_list:
                    apath = os.path.join(main_dir, filename)
                    # noinspection PyBroadException
                    try:
                        os.remove(apath)
                    except Exception as error:  # fall back to the Windows shell to force-delete
                        os.system("del /f /q %s" % apath)
        elif os.path.isfile(file_path) is True:
            # noinspection PyBroadException
            try:
                os.remove(file_path)
            except Exception as error:  # fall back to the Windows shell to force-delete
                os.system("del /f /q %s" % file_path)


    @staticmethod
    def make_targz(output_filename, source_dir):
        """
        Pack the whole directory in one go; empty sub-directories are included.
        To pack without compression, change "w:gz" to "w:" or "w".
        :param output_filename: full path of the output archive, e.g. 'E:\\test.tar.gz'
        :param source_dir: root directory to pack, e.g. 'E:\\testFfile\\' packs the folder contents, 'E:\\testFfile' packs the folder itself
        """
        dir = os.path.split(output_filename)[0]
        if os.path.exists(dir) is False:
            os.makedirs(dir)
        with tarfile.open(output_filename, "w:gz") as tar:
            tar.add(source_dir, arcname=os.path.basename(source_dir))

    @staticmethod
    def del_floder(path_data):
        """
        Delete an entire folder
        """
        if os.path.isdir(path_data):
            shutil.rmtree(path_data)

    def del_temp_workspace(self):
        """
        Delete the temporary workspace
        """
        if DEBUG is True:
            return
        path = self.__workspace_path + EXE_NAME + r'\Temporary'
        if os.path.exists(path):
            self.del_floder(path)

    def __init_processing_paras(self, names):
        """
        :param names: dict of parameter dicts, one per input product
        """
        processing_paras = {}
        for name in names:
            para = names[name]
            if para is None:
                logger.error(name + " is None!")
                return False

            if para['ParaType'] == 'File':
                if para['DataType'] == 'File':
                    para_path = os.path.join(self.__workspace_path, para['ParaValue'])
                    processing_paras.update({name: para_path})
                if para['DataType'] == 'xml':
                    para_path = os.path.join(self.__workspace_path, para['ParaValue'])
                    processing_paras.update({name: para_path})
                if para['DataType'] == "ymal":
                    para_path = os.path.join(self.__workspace_path, para['ParaValue'])
                    processing_paras.update({name: para_path})
                if para['DataType'] == 'tar.gz':
                    para_path = os.path.join(self.__workspace_path, para['ParaValue'])
                    tar_gz_dic = self.__dec_tar_gz(name, para_path, self.__workspace_unpack_path)
                    processing_paras.update(tar_gz_dic)
                if para['DataType'] == 'tif' or para['DataType'] == 'tiff':  # DEM supplied as absolute file paths
                    if para['ParaValue'] != 'empty' and para['ParaValue'] != 'Empty' and para['ParaValue'] != '':
                        para_path_list = para['ParaValue'].split(";")
                        if len(para_path_list) != 0:
                            dem_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
                            if os.path.exists(dem_path) is False:
                                os.mkdir(dem_path)
                            for file_path in para_path_list:
                                tif_name = os.path.basename(file_path)
                                shutil.copy(file_path, os.path.join(dem_path, tif_name))
                            para_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
                            processing_paras.update({name: para_path})
                if para['DataType'] == 'zip':  # DEM supplied as absolute zip paths
                    if para['ParaValue'] != 'empty' and para['ParaValue'] != 'Empty' and para['ParaValue'] != '':
                        para_path_list = para['ParaValue'].split(";")
                        if len(para_path_list) != 0:
                            dem_path = os.path.join(self.__workspace_origin_path, para['ParaName'])
                            if os.path.exists(dem_path) is False:
                                os.mkdir(dem_path)
                            for file_path in para_path_list:
                                BlockProcess.unzip_dem(file_path, dem_path)
                            processing_paras.update({name: dem_path})
            elif para['ParaType'] == 'Value':
                if para['DataType'] == 'float':
                    value = float(para['ParaValue'])
                    processing_paras.update({name: value})
        return processing_paras

    def extract_tarfile(self, tar_file, target_dir):
        with tarfile.open(tar_file, 'r') as tar_ref:
            for member in tar_ref.getmembers():
                if member.isdir():
                    continue
                filename = os.path.basename(member.name)
                source = tar_ref.extractfile(member)
                target = open(os.path.join(target_dir, filename), "wb")
                with source, target:
                    shutil.copyfileobj(source, target)


    def __dec_tar_gz(self, name1, tar_gz_path, out_dir):
        """
        Unpack a .tar.gz scene archive
        :param tar_gz_path: path of the .tar.gz file
        :param out_dir: output folder
        :return para_dic: paths of the fully polarised images
        """
        # Create the output folder
        name = os.path.split(tar_gz_path)[1].rstrip('.tar.gz')
        name_d = name.split('_')[6]
        file_dir = os.path.join(out_dir, name_d + '\\')
        if os.path.exists(file_dir) is False:
            os.makedirs(file_dir)
        # Unpack
        logging.info("untargz path:{}".format(file_dir))
        self.extract_tarfile(tar_gz_path, file_dir)
        # t = tarfile.open(tar_gz_path)
        # t.extractall(path=file_dir)

        # Collect the files in the folder
        para_dic = {}
        # if os.path.exists(file_dir + name + '\\'):
        #     meta_xml_paths = list(glob.glob(os.path.join(file_dir + name, '*.meta.xml')))
        #     para_dic.update({'SLC': file_dir + name})
        # else:
        #     meta_xml_paths = list(glob.glob(os.path.join(file_dir, '*.meta.xml')))
        #     para_dic.update({'SLC': file_dir})
        if os.path.exists(file_dir + name + '\\'):
            meta_xml_paths = list(glob.glob(os.path.join(file_dir + name, '*.xml')))
            para_dic.update({'SLC': file_dir + name})
        else:
            meta_xml_paths = list(glob.glob(os.path.join(file_dir, '*.xml')))
            para_dic.update({'SLC': file_dir})

        if meta_xml_paths == []:
            raise Exception('there is not .meta.xml in path: ', file_dir + '\\')
        para_dic.update({'META': meta_xml_paths[0]})
        self.image_meta_xml = meta_xml_paths
        # para_dic.update({name1: file_dir})  # {SLC: file_path}

        # Collect the polarisation channels in the folder
        hh_flag, hv_flag, vh_flag, vv_flag, dh_flag = 0, 0, 0, 0, 0
        if os.path.exists(file_dir + name + '\\'):
            in_tif_paths = list(glob.glob(os.path.join(file_dir + name + '\\', '*.tif')))
            if in_tif_paths == []:
                in_tif_paths = list(glob.glob(os.path.join(file_dir + name + '\\', '*.tiff')))
        else:
            in_tif_paths = list(glob.glob(os.path.join(file_dir, '*.tif')))
            if in_tif_paths == []:
                in_tif_paths = list(glob.glob(os.path.join(file_dir, '*.tiff')))
        for in_tif_path in in_tif_paths:
            # Determine the polarisation from the file name
            if 'hh' in os.path.basename(in_tif_path) or 'HH' in os.path.basename(in_tif_path):
                hh_flag = 1
            elif 'hv' in os.path.basename(in_tif_path) or 'HV' in os.path.basename(in_tif_path):
                hv_flag = 1
            elif 'vh' in os.path.basename(in_tif_path) or 'VH' in os.path.basename(in_tif_path):
                vh_flag = 1
            elif 'vv' in os.path.basename(in_tif_path) or 'VV' in os.path.basename(in_tif_path):
                vv_flag = 1
            elif "DH" in os.path.basename(in_tif_path):
                dh_flag = 1
        if hh_flag == 0 and hv_flag == 0 and vh_flag == 0 and vv_flag == 0 and dh_flag == 0:
            raise Exception('can not found files: HH、HV、VH、VV、DH in path:', tar_gz_path)
        self.processinfo = [hh_flag, hv_flag, vh_flag, vv_flag, dh_flag]
        return para_dic
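One caveat in the archive handling above: str.rstrip('.tar.gz') strips any trailing run of the characters '.', 't', 'a', 'r', 'g', 'z', not the literal suffix, so base names ending in those letters lose extra characters. The product names used here end in digits, so it happens to work, but the safer spellings are shown below (the example name is made up):

name = "scene_maz.tar.gz"
print(name.rstrip('.tar.gz'))        # 'scene_m'  - the trailing 'az' is eaten too
print(name[:-len('.tar.gz')])        # 'scene_maz'
print(name.removesuffix('.tar.gz'))  # 'scene_maz' (Python 3.9+)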
|
||||
|
||||
def process_handle(self):
|
||||
isError, CorrectMethod = self.__check_handler.check_input_paras(['CorrectMethod']) # //todo 获取xml中校正方法 根据不同方法进行结果处理
|
||||
|
||||
if CorrectMethod.get('CorrectMethod') == '1' or CorrectMethod.get('CorrectMethod') == 1:
|
||||
logger.info("CorrectMethod is RPC!")
|
||||
return self.RD_process_handle()
|
||||
|
||||
elif CorrectMethod.get('CorrectMethod') == '2' or CorrectMethod.get('CorrectMethod') == 2:
|
||||
logger.info("CorrectMethod is RD!")
|
||||
|
||||
return self.RD_process_handle()
|
||||
else:
|
||||
raise Exception('No CorrectMethod')
|
||||
|
||||
|
||||
def RPC_process_handle(self):
|
||||
logger.info(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'))
|
||||
# print(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'))
|
||||
# 1、DEM拼接、裁剪、重采样
|
||||
Orth_Slc=[]
|
||||
in_dem_path = self.__in_processing_paras['DEM']
|
||||
meta_file_path = self.__in_processing_paras['META'] # .meta文件路径
|
||||
out_dem_path = self.__workspace_ResampledDEM_path
|
||||
dem_merged_path=DEMProcess.dem_merged(in_dem_path, meta_file_path, out_dem_path) # 生成TestDEM\mergedDEM_VRT.tif
|
||||
|
||||
# 2、间接定位法求解行列坐标
|
||||
slc_paths = self.__in_processing_paras["SLC"]
|
||||
rpc_slc_path=None
|
||||
for slc_path in os.listdir(slc_paths):
|
||||
if slc_path.find(".tiff")>0:
|
||||
slc_path_temp=os.path.join(slc_paths,slc_path)
|
||||
# out_power_path=os.path.join(self.__workspace_Temporary_path,slc_path.replace(".tiff","_db.tif").replace("L1A","L1B"))
|
||||
out_power_path=os.path.join(self.__workspace_Temporary_path,slc_path.replace(".tiff","_db.tif").replace("L1A","L1B").replace("HH","h_h").replace("HV","h_v").replace("VH","v_h").replace("VV","v_v"))
|
||||
alg.sar_backscattering_coef(slc_path_temp,self.__in_processing_paras['META'],out_power_path)
|
||||
rpc_slc_path=slc_path_temp.replace(".tiff",".rpc")
|
||||
if not os.path.exists(rpc_slc_path):
|
||||
rpc_slc_path=slc_path_temp.replace(".tiff",".rpb")
|
||||
if not os.path.exists(rpc_slc_path):
|
||||
logger.error("rpc file Not Found!")
|
||||
# out_slc_power_path=os.path.join(self.__workspace_package_path,os.path.basename(out_power_path.replace("_db.tif",".tif").replace("L1B","L4")))
|
||||
out_slc_power_path=os.path.join(self.__workspace_package_path,os.path.basename(out_power_path.replace("_db.tif","-ortho.tif")))
|
||||
rpc_correction(out_power_path,rpc_slc_path,out_slc_power_path, dem_merged_path)
|
||||
logger.info('progress bar: 30%')
|
||||
# 2.1 生成映射表
|
||||
slc_path=os.path.join(slc_paths,os.listdir(slc_paths)[0])
|
||||
out_rpc_rc_path = os.path.join(self.__workspace_package_path,"ori_sim-ortho.tif")
|
||||
get_RPC_lon_lat(out_power_path,out_rpc_rc_path)
|
||||
#getRCImageRC(slc_path_temp,out_rpc_rc_path,rpc_slc_path)
|
||||
logger.info('progress bar: 70%')
|
||||
# 2.2 生成局地入射角
|
||||
Orthorectification = IndirectOrthorectification(os.path.join(os.path.dirname(__file__),"config.yaml"))
|
||||
Orthorectification.IndirectOrthorectification(self.__in_processing_paras["SLC"],self.__workspace_package_path) # 改动1
|
||||
out_incangle_path=os.path.join(self.__workspace_package_path,"inci_Angle-ortho.tif")
|
||||
out_localincangle_path=os.path.join(self.__workspace_package_path,"LocalincidentAngle-ortho.tif")
|
||||
out_incangle_geo_path=os.path.join(self.__workspace_package_path,"inc_angle.tif")
|
||||
out_localincangle_geo_path=os.path.join(self.__workspace_package_path,"LocalincidenceAngle.tif") # 决定入射角名字
|
||||
Orthorectification.getRPC_incidenceAngle_lon_lat(dem_merged_path,out_rpc_rc_path,self.__workspace_Temporary_path,self.__workspace_package_path,out_incangle_path,out_localincangle_path,out_incangle_geo_path,out_localincangle_geo_path)
|
||||
# 2.3 输出结果
|
||||
|
||||
# self.del_floder(self.__workspace_processing_path)
|
||||
logger.info('process_handle finished!')
|
||||
logger.info('progress bar :90%')
|
||||
# 7、打包生成快视图
|
||||
for tiff_name in os.listdir(self.__workspace_package_path):
|
||||
if tiff_name.find(".tiff")>0 or tiff_name.find(".tif")>0:
|
||||
self.imageHandler.write_quick_view(os.path.join(self.__workspace_package_path,tiff_name))
|
||||
|
||||
# 1/5、移动原始数据 ------------------------- 这里限制 原始数据是否进入 最终产品列表中
|
||||
for maindir, subdir, file_name_list in os.walk(slc_paths):
|
||||
for filename in file_name_list:
|
||||
apath = os.path.join(maindir, filename)
|
||||
file_type = apath.split('.')[-1]
|
||||
if file_type in ["xml"]:
|
||||
output = os.path.join(self.__workspace_package_path, filename)
|
||||
shutil.copy(apath, output)
|
||||
else:
|
||||
output=os.path.join(self.__workspace_package_path, filename)
|
||||
shutil.copy(apath, output)
|
||||
|
||||
# 生成元文件案例
|
||||
# xml_path = "./model_meta.xml"
|
||||
tem_folder=self.__workspace_path + EXE_NAME + r"\Temporary""\\"
|
||||
image_path=out_slc_power_path# os.path.join(self.__workspace_package_path, "OrthoMapTable.tif")
|
||||
out_path1 = os.path.join(tem_folder, "trans_geo_projcs.tif")
|
||||
out_path2 = os.path.join(tem_folder, "trans_projcs_geo.tif")
|
||||
# par_dict = CreateDict().calu_nature(image_path, self.processinfo, out_path1, out_path2)
|
||||
#
|
||||
# dem_path=os.path.join(self.__workspace_ResampledDEM_path, 'mergedDEM.tif')
|
||||
# out_dem_path1 = os.path.join(tem_folder, "trans_dem_geo_projcs.tif")
|
||||
# out_dem_path2 = os.path.join(tem_folder, "trans_dem_projcs_geo.tif")
|
||||
# # par_dict2 = CreateDict().calu_dem_nature(dem_path, dem_meta, out_dem_path1, out_dem_path2, sampling_f, para_A_arr)
|
||||
# # par_dict2 = CreateDict().calu_dem_nature(dem_path, dem_meta, out_dem_path1, out_dem_path2, sampling_f, para_A_arr)
|
||||
# par_dict2 = CreateDict().calu_dem_nature(dem_path, out_dem_path1, out_dem_path2, None,Orthorectification.SatelliteOrbitModel.A_arr)
|
||||
# model_xml_path = os.path.join(self.__workspace_Temporary_path, "creat_standard.meta.xml") # 输出xml路径
|
||||
# CreateStadardXmlFile(xml_path, self.alg_xml_path, par_dict, par_dict2, model_xml_path).create_standard_xml()
|
||||
#
|
||||
# sar_image_meta_xml = list(glob.glob(os.path.join(self.__workspace_package_path, '*.meta.xml')))
|
||||
# meta_xml_path = os.path.join(self.__workspace_package_path, os.path.basename(self.__out_para).replace(".tar.gz",".meta.xml"))
|
||||
# CreateMetafile(sar_image_meta_xml[0], self.alg_xml_path, model_xml_path, meta_xml_path).process(os.path.basename(self.__in_processing_paras["SLC"]))
|
||||
|
||||
model_path = "./product.xml"
|
||||
meta_xml_path = os.path.join(self.__workspace_package_path,
|
||||
os.path.basename(self.__out_para).replace(".tar.gz", ".meta.xml"))
|
||||
|
||||
para_dict = CreateMetaDict(image_path, self.__in_processing_paras['META'], self.__workspace_package_path,
|
||||
out_path1, out_path2).calu_nature()
|
||||
para_dict.update({"ProductProductionInfo_BandSelection": "1,2"})
|
||||
para_dict.update({"ProductProductionInfo_AuxiliaryDataDescription": "DEM"})
|
||||
CreateProductXml(para_dict, model_path, meta_xml_path).create_standard_xml()
|
||||
# 生成压缩包
|
||||
logger.info('progress bar :94%')
|
||||
logger.info('start make targz..')
|
||||
self.del_floder(self.__workspace_unpack_path)
|
||||
self.del_floder(self.__workspace_ResampledDEM_path)
|
||||
self.del_floder(self.__workspace_LutImg_path)
|
||||
self.del_floder(self.__workspace_IncidenceImg_path)
|
||||
self.del_floder(self.__workspace_SimImg_path)
|
||||
self.del_floder(self.__workspace_SARIntensity_path)
|
||||
self.make_targz(self.__out_para, self.__workspace_package_path+"\\")
|
||||
logger.info('make targz finish')
|
||||
logger.info('progress bar :100%')
|
||||
return True
|
||||
|
||||
|
||||
pass
|
||||
|
||||
|
||||
def cut_dem(self, dem_merged_path, meta_file_path):
|
||||
_, scopes = DictXml(meta_file_path).get_extend()
|
||||
intersect_polygon = pp().intersect_polygon(scopes)
|
||||
if intersect_polygon is None:
|
||||
raise Exception('cal intersect box fail!')
|
||||
shp_path = os.path.join(self.__workspace_Temporary_path, 'IntersectPolygon.shp')
|
||||
if pp().write_polygon_shp(shp_path, intersect_polygon, 4326) is False:
|
||||
raise Exception('create intersect shp fail!')
|
||||
dem_process = os.path.join(self.__workspace_Temporary_path, 'dem_cut.tif')
|
||||
pp().cut_img(dem_process, dem_merged_path, shp_path)
|
||||
return dem_process
|
||||
|
||||
|
||||
def process_sim_ori(self, sim_ori):
|
||||
|
||||
scopes = ()
|
||||
scopes += (DictXml(self.__in_processing_paras['META']).get_extend(),)
|
||||
intersect_polygon = pp().intersect_polygon(scopes)
|
||||
if intersect_polygon is None:
|
||||
raise Exception('create intersect shp fail!')
|
||||
shp_path = os.path.join(self.__workspace_Temporary_path, 'IntersectPolygon.shp')
|
||||
if pp().write_polygon_shp(shp_path, intersect_polygon, 4326) is False:
|
||||
raise Exception('create intersect shp fail!')
|
||||
sim_ori_process = os.path.join(self.__workspace_Temporary_path, 'sim_ori_process.tif')
|
||||
pp().cut_img(sim_ori_process, sim_ori, shp_path)
|
||||
return sim_ori_process
|
||||
|
||||
def correct_sim_ori(self, Orthorectification, slc_paths, bsMap_merged_path, out_dir_path):
|
||||
# 对映射表进行校正
|
||||
sim_ori_tiff = out_dir_path + "\\" + "RD_sim_ori.tif"
|
||||
out_sim_ori = out_dir_path + "\\" + "sim_ori-ortho.tif"
|
||||
parameter_path = os.path.join(self.__workspace_package_path, "orth_para.txt")
|
||||
in_tif_paths = list(glob.glob(os.path.join(slc_paths, '*.tiff')))
|
||||
out_rpc_db = os.path.join(self.__workspace_baseMap_path, 'rpc_line.tif')
|
||||
alg.sar_backscattering_coef_RPC(in_tif_paths[0], self.__in_processing_paras['META'], out_rpc_db)
|
||||
|
||||
db_tif_path = os.path.join(self.__workspace_baseMap_path, 'rpc_db_geo.tif')
|
||||
|
||||
Orthorectification.calInterpolation_bil_Wgs84_rc_sar_sigma(parameter_path, sim_ori_tiff, out_rpc_db,
|
||||
db_tif_path)
|
||||
dataset = ImageHandler().get_dataset(db_tif_path)
|
||||
baseMapCut = os.path.join(self.__workspace_baseMap_path, 'baseMapCut.tif')
|
||||
inputCut = os.path.join(self.__workspace_baseMap_path, 'inputCut.tif')
|
||||
baseMapResample = os.path.join(self.__workspace_baseMap_path, 'baseMapCut_Resample.tif')
|
||||
shpCenterFile = os.path.join(self.__workspace_baseMap_path, 'shpCenter.shp')
|
||||
center_scopes = (ImageHandler().get_center_scopes(dataset),)
|
||||
intersect_polygon = pp().intersect_polygon(center_scopes)
|
||||
if intersect_polygon is None:
|
||||
raise Exception('create intersect shp fail!')
|
||||
if pp().write_polygon_shp(shpCenterFile, intersect_polygon, 4326) is False:
|
||||
raise Exception('create intersect shp fail!')
|
||||
pp().cut_img(baseMapCut, bsMap_merged_path, shpCenterFile)
|
||||
pp().cut_img(inputCut, db_tif_path, shpCenterFile)
|
||||
pp().resampling_by_scale(baseMapCut, baseMapResample, inputCut)
|
||||
in_sar_png = self.imageHandler.write_view(inputCut)
|
||||
baseMap_png = self.imageHandler.write_view(baseMapResample)
|
||||
Orthorectification.get_offset(in_sar_png, baseMap_png, inputCut)
|
||||
off_txt = os.path.join(os.path.dirname(inputCut), 'off.txt')
|
||||
with open(off_txt, 'r') as f:
|
||||
data = f.readlines()
|
||||
x = float(data[0])
|
||||
y = float(data[1])
|
||||
im_proj, im_geotrans, im_arr = self.imageHandler.read_img(sim_ori_tiff)
|
||||
lon_new = im_geotrans[0] + x
|
||||
lat_new = im_geotrans[3] - y
|
||||
im_geosNew = [lon_new, im_geotrans[1], im_geotrans[2], lat_new, im_geotrans[4], im_geotrans[5]]
|
||||
ImageHandler().write_img(out_sim_ori, im_proj, im_geosNew, im_arr)
|
||||
os.remove(sim_ori_tiff)
|
||||
return out_sim_ori
|
||||
|
||||
def RD_process_handle(self):
|
||||
# RPC
|
||||
logger.info(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'))
|
||||
# print(datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'))
|
||||
# 1、DEM拼接、裁剪、重采样
|
||||
Orth_Slc=[]
|
||||
in_dem_path = self.__in_processing_paras['DEM']
|
||||
meta_file_path = self.__in_processing_paras['META'] # .meta文件路径
|
||||
out_dem_path = self.__workspace_ResampledDEM_path
|
||||
dem_merged_path=DEMProcess.dem_merged(in_dem_path, meta_file_path, out_dem_path) # 生成TestDEM\mergedDEM_VRT.tif
|
||||
|
||||
# bsMap = self.__in_processing_paras['baseMap']
|
||||
# ortho_bsMap_path = self.__workspace_baseMap_path
|
||||
# bsMap_merged_path = DEMProcess.bsMap_merged(bsMap, meta_file_path, ortho_bsMap_path)
|
||||
|
||||
dem_path = self.cut_dem(dem_merged_path, meta_file_path)
|
||||
# 2、间接定位法求解行列坐标
|
||||
slc_paths = self.__in_processing_paras["SLC"]
|
||||
# 2.1 生成映射表
|
||||
slc_path=os.path.join(slc_paths,os.listdir(slc_paths)[0])
|
||||
|
||||
# 2.2 生成局地入射角
|
||||
path2 = env_str
|
||||
Orthorectification = IndirectOrthorectification(os.path.join(path2,"config.yaml"))
|
||||
Orthorectification.IndirectOrthorectification(self.__in_processing_paras["SLC"], self.__workspace_package_path) # 改动1
|
||||
# 2.3 输出结果
|
||||
|
||||
# 3 处理RD
|
||||
in_slc_path=None
|
||||
for slc_path in os.listdir(slc_paths):
|
||||
if slc_path.find(".tiff")>0 and (slc_path.find("_HH_")>0 or slc_path.find("_VV_")>0 or slc_path.find("_DH_")>0):
|
||||
in_slc_path=os.path.join(slc_paths,slc_path)
|
||||
break
|
||||
# 获取校正模型后
|
||||
Orthorectification.preCaldem_sar_rc(dem_path,in_slc_path,self.__workspace_Temporary_path,self.__workspace_package_path.replace("\\","\\\\")) # 初步筛选坐标范围
|
||||
out_dir_path = self.__workspace_package_path.replace("\\", "\\\\")
|
||||
# sim_ori_rpc = self.correct_sim_ori(Orthorectification, slc_paths, bsMap_merged_path, out_dir_path)
|
||||
logger.info('progress bar: 40%')
|
||||
# clip_dem_reample_path=os.path.join(self.__workspace_Temporary_path, "SAR_dem.tiff")
|
||||
# infooption=gdal.InfoOptions("-json")
|
||||
# clip_dem_tif_info=gdal.Info(clip_dem_reample_path,options=infooption)
|
||||
# dem_merged_info=gdal.Info(dem_merged_path,options=infooption)
|
||||
# sampling_f=clip_dem_tif_info['size'][0]/dem_merged_info['size'][0]
|
||||
|
||||
|
||||
this_outSpace_path = out_dir_path
|
||||
this_out_dem_slantRange_path = out_dir_path + "\\" + "dem_slantRange.tiff"#// 地形斜距
|
||||
this_out_plant_slantRange_path = out_dir_path + "\\" + "flat_slantRange.tiff"#// 平地斜距
|
||||
# 保留结果
|
||||
if(os.path.exists(this_out_dem_slantRange_path)):
|
||||
os.remove(this_out_dem_slantRange_path)
|
||||
|
||||
if(os.path.exists(this_out_plant_slantRange_path)):
|
||||
os.remove(this_out_plant_slantRange_path)
|
||||
|
||||
|
||||
this_out_dem_rc_path = out_dir_path + "\\" + "WGS_SAR_map.tiff"#// 经纬度与行列号映射
|
||||
if(os.path.exists(this_out_dem_rc_path)):
|
||||
os.remove(this_out_dem_rc_path)
|
||||
|
||||
this_out_sar_sim_path = out_dir_path + "\\" + "sar_sim.tiff"
|
||||
if (os.path.exists(this_out_sar_sim_path)):
|
||||
os.remove(this_out_sar_sim_path)
|
||||
|
||||
this_out_sar_sim_wgs_path = out_dir_path + "\\" + "sar_sim_wgs.tiff" # // 经纬度与行列号映射
|
||||
if (os.path.exists(this_out_sar_sim_wgs_path)):
|
||||
os.remove(this_out_sar_sim_wgs_path)
|
||||
|
||||
|
||||
this_out_incidence_path = out_dir_path + "\\" + "incidentAngle.tiff"#// 入射角
|
||||
this_out_localIncidenct_path = out_dir_path + "\\" + "localincidentAngle.tiff"#// 局地入射角
|
||||
this_out_inc_angle_rpc_path = out_dir_path + "\\" + "RD_incidentAngle.tiff"#// 局地入射角
|
||||
this_out_local_inc_angle_rpc_path = out_dir_path + "\\" + "RD_localincidentAngle.tiff"#// 局地入射角
|
||||
|
||||
if (os.path.exists(this_out_inc_angle_rpc_path)):
|
||||
shutil.move(this_out_inc_angle_rpc_path, out_dir_path + "\\" + "inci_Angle-ortho.tif")
|
||||
|
||||
if (os.path.exists(this_out_local_inc_angle_rpc_path)):
|
||||
shutil.move(this_out_local_inc_angle_rpc_path, out_dir_path + "\\" + "LocalIncidentAngle-ortho.tif")
|
||||
|
||||
if(os.path.exists(this_out_incidence_path)):
|
||||
shutil.move(this_out_incidence_path,out_dir_path + "\\" + "inc_angle.tif")
|
||||
|
||||
if(os.path.exists(this_out_localIncidenct_path)):
|
||||
shutil.move(this_out_localIncidenct_path,out_dir_path + "\\" + "LocalIncidenceAngle.tif")
|
||||
|
||||
this_out_ori_sim_tiff = out_dir_path + "\\" + "RD_ori_sim.tif"#// 局地入射角
|
||||
if (os.path.exists(this_out_ori_sim_tiff)):
|
||||
shutil.move(this_out_ori_sim_tiff, out_dir_path + "\\" + "ori_sim-ortho.tif")
|
||||
#
|
||||
# this_out_sim_ori_tiff = out_dir_path + "\\" + "RD_sim_ori.tif" # // 局地入射角
|
||||
# if (os.path.exists(this_out_sim_ori_tiff)):
|
||||
# shutil.move(this_out_sim_ori_tiff, out_dir_path + "\\" + "sim_ori-ortho.tif")
|
||||
|
||||
# GTC 入射角
|
||||
GTC_rc_path=os.path.join(self.__workspace_package_path,"ori_sim-ortho.tif")
|
||||
GTC_out_path=self.__workspace_package_path
|
||||
|
||||
parameter_path = os.path.join(self.__workspace_package_path, "orth_para.txt")
|
||||
|
||||
|
||||
dem_rc = os.path.join(self.__workspace_package_path, "sim_ori-ortho.tif")
|
||||
# dem_rc_pro = self.process_sim_ori(dem_rc)
|
||||
# shutil.move(dem_rc_pro, dem_rc)
|
||||
in_tif_paths = list(glob.glob(os.path.join(slc_paths, '*.tiff')))
|
||||
for in_tif_path in in_tif_paths:
|
||||
out_sar_path = os.path.join(GTC_out_path, os.path.split(in_tif_path)[1])
|
||||
slc_path_temp=os.path.join(slc_paths,in_tif_path)
|
||||
out_power_path = os.path.join(self.__workspace_Temporary_path,
|
||||
slc_path_temp.replace(".tiff", "-lin.tif").replace("L1A", "L1B")).replace(
|
||||
"HH", "h_h").replace("HV", "h_v").replace("VH", "v_h").replace("VV", "v_v").replace("DH", "d_h")
|
||||
# out_power_path=os.path.join(self.__workspace_Temporary_path,slc_path_temp.replace(".tiff","_db.tif"))
|
||||
alg.sar_backscattering_coef(slc_path_temp, self.__in_processing_paras['META'], out_power_path)
|
||||
|
||||
lin_tif_path = os.path.join(self.__workspace_Temporary_path,
|
||||
os.path.basename(out_power_path).split('-')[0] + "-lin_geo.tif")
|
||||
# Orthorectification.calInterpolation_cubic_Wgs84_rc_sar_sigma(parameter_path, dem_rc,
|
||||
# out_power_path,
|
||||
# lin_tif_path)
|
||||
|
||||
Orthorectification.calInterpolation_bil_Wgs84_rc_sar_sigma(parameter_path, dem_rc,
|
||||
out_power_path,
|
||||
lin_tif_path)
|
||||
tempout_tif_path = os.path.join(self.__workspace_package_path,
|
||||
os.path.basename(lin_tif_path).split('-')[0] + "-" + tar + ".tif")
|
||||
alg.lin_to_db(lin_tif_path, tempout_tif_path) # 线性值转回DB值
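# Illustration only (a minimal sketch, not the project's alg.lin_to_db implementation): assuming the
# usual convention sigma0_dB = 10 * log10(sigma0_linear) and ignoring any nodata handling the real
# helper performs, the core of the linear-to-dB conversion would be:
#
#   import numpy as np
#   def lin_to_db_sketch(lin_arr, eps=1e-10):
#       # clamp to a small positive floor so log10 never sees zeros or negatives
#       return 10.0 * np.log10(np.maximum(lin_arr, eps))
#   # e.g. a linear backscatter of 0.1 corresponds to -10 dB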
|
||||
|
||||
# temp_slc_path=os.path.join(self.__workspace_package_path, os.path.basename(out_power_path))
|
||||
# temp_slc_path=temp_slc_path.replace("_db.tif","-ortho.tif")
|
||||
#inter_Range2Geo(self,lon_lat_path , data_tiff , grid_path , space)
|
||||
|
||||
# Orthorectification.inter_Range2Geo(GTC_rc_path,out_power_path,temp_slc_path,Orthorectification.heightspace)
|
||||
# Orthorectification.calInterpolation_cubic_Wgs84_rc_sar_sigma(parameter_path, dem_rc, out_power_path, temp_slc_path) #
|
||||
#Orth_Slc.append(temp_slc_path)
|
||||
# power_list.append(out_power_path)
|
||||
|
||||
for tiff_name in os.listdir(self.__workspace_package_path):
|
||||
if tiff_name.find(".tiff")>0 or tiff_name.find(".tif")>0:
|
||||
self.imageHandler.write_quick_view(os.path.join(self.__workspace_package_path,tiff_name))
|
||||
|
||||
# 1/5、原始数据中的.incidence.xml、.meta.xml输出,影像不输出
|
||||
for maindir, subdir, file_name_list in os.walk(slc_paths):
|
||||
for filename in file_name_list:
|
||||
apath = os.path.join(maindir, filename)
|
||||
file_type = apath.split('.')[-1]
|
||||
if file_type in ["xml"]:
|
||||
output = os.path.join(self.__workspace_package_path, filename)
|
||||
shutil.copy(apath, output)
|
||||
elif 'lin' in filename:
|
||||
continue
|
||||
else:
|
||||
output=os.path.join(self.__workspace_package_path, filename)
|
||||
shutil.copy(apath, output)
|
||||
|
||||
# 2/5、正射的成果图
|
||||
# for maindir, subdir, file_name_list in os.walk(self.__workspace_package_path):
|
||||
# for filename in file_name_list:
|
||||
# apath = os.path.join(maindir, filename)
|
||||
# file_type = filename.split('.')[-1]
|
||||
# image_name = os.path.splitext(filename)[0]
|
||||
#self.imageHandler.write_quick_view(output_OrthoResult) # 快视图
|
||||
# 生成元文件案例
|
||||
# xml_path = "./model_meta.xml"
|
||||
tem_folder=self.__workspace_path + EXE_NAME + r"\Temporary""\\"
|
||||
image_path=tempout_tif_path# os.path.join(self.__workspace_package_path, "OrthoMapTable.tif")
|
||||
out_path1 = os.path.join(tem_folder, "trans_geo_projcs.tif")
|
||||
out_path2 = os.path.join(tem_folder, "trans_projcs_geo.tif")
|
||||
# par_dict = CreateDict().calu_nature(image_path, self.processinfo, out_path1, out_path2)
|
||||
#
|
||||
# dem_path=os.path.join(self.__workspace_ResampledDEM_path, 'mergedDEM.tif')
|
||||
# out_dem_path1 = os.path.join(tem_folder, "trans_dem_geo_projcs.tif")
|
||||
# out_dem_path2 = os.path.join(tem_folder, "trans_dem_projcs_geo.tif")
|
||||
# # par_dict2 = CreateDict().calu_dem_nature(dem_path, dem_meta, out_dem_path1, out_dem_path2, sampling_f, para_A_arr)
|
||||
# par_dict2 = CreateDict().calu_dem_nature(dem_path, out_dem_path1, out_dem_path2, sampling_f,Orthorectification.SatelliteOrbitModel.A_arr)
|
||||
# model_xml_path = os.path.join(self.__workspace_Temporary_path, "creat_standard.meta.xml") # 输出xml路径
|
||||
# CreateStadardXmlFile(xml_path, self.alg_xml_path, par_dict, par_dict2, model_xml_path).create_standard_xml()
|
||||
#
|
||||
# sar_image_meta_xml = list(glob.glob(os.path.join(self.__workspace_package_path, '*.meta.xml')))
|
||||
# meta_xml_path = os.path.join(self.__workspace_package_path, os.path.basename(self.__out_para).replace(".tar.gz",".meta.xml"))
|
||||
# CreateMetafile(sar_image_meta_xml[0], self.alg_xml_path, model_xml_path, meta_xml_path).process(os.path.basename(self.__in_processing_paras["SLC"]))
|
||||
model_path = "./product.xml"
|
||||
meta_xml_path = os.path.join(self.__workspace_package_path, os.path.basename(self.__out_para).replace(".tar.gz",".meta.xml"))
|
||||
|
||||
para_dict = CreateMetaDict(image_path, self.__in_processing_paras['META'], self.__workspace_package_path, out_path1, out_path2).calu_nature()
|
||||
para_dict.update({"imageinfo_ProductName": '正射校正'})
|
||||
para_dict.update({"imageinfo_ProductIdentifier": 'Ortho'})
|
||||
para_dict.update({"imageinfo_ProductLevel": productLevel})
|
||||
para_dict.update({"ProductProductionInfo_BandSelection": "1,2"})
|
||||
para_dict.update({"ProductProductionInfo_AuxiliaryDataDescription": "DEM"})
|
||||
CreateProductXml(para_dict, model_path, meta_xml_path).create_standard_xml()
|
||||
|
||||
temp_folder = os.path.join(self.__workspace_path, EXE_NAME, 'Output')
|
||||
out_xml = os.path.join(temp_folder, os.path.basename(meta_xml_path))
|
||||
if os.path.exists(temp_folder) is False:
|
||||
os.mkdir(temp_folder)
|
||||
# CreateProductXml(para_dict, model_path, out_xml).create_standard_xml()
|
||||
shutil.copy(meta_xml_path, out_xml)
|
||||
|
||||
# 生成压缩包
|
||||
logger.info('progress bar :94%')
|
||||
logger.info('start make targz..')
|
||||
self.del_floder(self.__workspace_unpack_path)
|
||||
self.del_floder(self.__workspace_ResampledDEM_path)
|
||||
self.del_floder(self.__workspace_LutImg_path)
|
||||
self.del_floder(self.__workspace_IncidenceImg_path)
|
||||
self.del_floder(self.__workspace_SimImg_path)
|
||||
self.del_floder(self.__workspace_SARIntensity_path)
|
||||
self.make_targz(self.__out_para, self.__workspace_package_path+"\\")
|
||||
logger.info('make targz finish')
|
||||
logger.info('progress bar :100%')
|
||||
return True
|
||||
pass
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
DEBUG = config['config']['debug']# True
|
||||
if '-DEBUG' in sys.argv:
|
||||
DEBUG=True
|
||||
start = datetime.datetime.now()
|
||||
try:
|
||||
if len(sys.argv) < 2:
|
||||
xml_path = 'Ortho-S-SAR.xml'
|
||||
else:
|
||||
xml_path = sys.argv[1]
|
||||
OrthoMain = OrthoMain(xml_path)
|
||||
if OrthoMain.check_source() is False:
|
||||
raise Exception('check_source() failed!')
|
||||
if OrthoMain.process_handle() is False:
|
||||
raise Exception('process_handle() failed!')
|
||||
logger.info('successful production of ortho products!')
|
||||
except Exception:
|
||||
logger.exception("run-time error!")
|
||||
finally:
|
||||
OrthoMain.del_temp_workspace()
|
||||
pass
|
||||
end = datetime.datetime.now()
|
||||
logger.info('running use time: %s ' % (end - start))
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
# -*- mode: python ; coding: utf-8 -*-
|
||||
import sys
|
||||
from shutil import copy, rmtree
|
||||
import os
|
||||
|
||||
cwdpath = os.getcwd()
|
||||
toolDir = os.path.join(cwdpath, 'tool')
|
||||
if os.path.exists(toolDir):
|
||||
rmtree(toolDir)  # toolDir is a directory; os.remove() cannot delete it
|
||||
os.mkdir(toolDir)
|
||||
source_folder = '../tool'
|
||||
|
||||
def copy_file(path_read, path_write):
|
||||
names = os.listdir(path_read)
|
||||
for name in names:
|
||||
path_read_new = os.path.join(path_read, name)
|
||||
path_write_new = os.path.join(path_write, name)
|
||||
if os.path.isdir(path_read_new):
|
||||
if not os.path.exists(path_write_new):
|
||||
os.mkdir(path_write_new)
|
||||
copy_file(path_read_new, path_write_new)
|
||||
else:
|
||||
copy(path_read_new, path_write_new)
|
||||
|
||||
copy_file(source_folder, toolDir)
|
||||
block_cipher = None
|
||||
|
||||
|
||||
a = Analysis(['OrthoMain.py'],
|
||||
pathex=[],
|
||||
binaries=[],
|
||||
datas=[('D:/Anaconda/envs/micro/Lib/site-packages/dask/dask.yaml', './dask'), ('D:/Anaconda/envs/micro/Lib/site-packages/distributed/distributed.yaml', './distributed')],
|
||||
hiddenimports=['pyproj._compat'],
|
||||
hookspath=[],
|
||||
hooksconfig={},
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher,
|
||||
noarchive=False)
|
||||
pyz = PYZ(a.pure, a.zipped_data,
|
||||
cipher=block_cipher)
|
||||
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
[],
|
||||
name='OrthoMain',
|
||||
debug=False,
|
||||
bootloader_ignore_signals=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
upx_exclude=[],
|
||||
runtime_tmpdir=None,
|
||||
console=True,
|
||||
disable_windowed_traceback=False,
|
||||
target_arch=None,
|
||||
codesign_identity=None,
|
||||
entitlements_file=None )
|
|
@ -0,0 +1,310 @@
|
|||
"""
|
||||
@Project :microproduct
|
||||
@File :BackScatteringXmlInfo.PY
|
||||
@Function :主函数
|
||||
@Author :LMM
|
||||
@Date :2021/10/19 14:39
|
||||
@Version :1.0.0
|
||||
"""
|
||||
import os
|
||||
from xml.etree.ElementTree import ElementTree, Element
|
||||
import xml.dom.minidom
|
||||
from lxml import etree
|
||||
import shutil
|
||||
from tool.algorithm.image.ImageHandle import ImageHandler
|
||||
from tool.algorithm.algtools.PreProcess import PreProcess as pp
|
||||
from osgeo import gdal
|
||||
import numpy as np
|
||||
import datetime
|
||||
from PIL import Image
|
||||
|
||||
|
||||
class CreateDict:
|
||||
"""根据影像/DEM的属性信息添加到字典中"""
|
||||
def __init__(self):
|
||||
self.ImageHandler = ImageHandler()
|
||||
pass
|
||||
|
||||
def calu_nature(self, image_path, image_pair, out_path1, out_path2):
|
||||
"""
|
||||
将productinfo节点需要填写的信息存入字典中
|
||||
image_path:影像路径
|
||||
image_pair:输入的压缩包中的极化对 例:hh,hv,vh,vv=【1,1,1,1】
|
||||
out_path1:地理转平面的输出路径
|
||||
out_path2:平面转地理的输出路径
|
||||
"""
|
||||
|
||||
para_dict = {}
|
||||
imageinfo_width = self.ImageHandler.get_img_width(image_path)
|
||||
para_dict.update({"imageinfo_width":imageinfo_width})
|
||||
imageinfo_height = self.ImageHandler.get_img_height(image_path)
|
||||
para_dict.update({"imageinfo_height":imageinfo_height})
|
||||
para_dict.update({"imageinfo_EarthModel": "WGS84"})
|
||||
para_dict.update({"imageinfo_ProjectModel": "UTM"})
|
||||
proj = self.ImageHandler.get_projection(image_path) # 输出的影像若是投影坐标系则先转成地理坐标系
|
||||
keyword = proj.split("[", 2)[0] # 若是地理坐标系则pass
|
||||
if keyword == "GEOGCS":
|
||||
pass
|
||||
elif keyword == "PROJCS":
|
||||
pp.trans_projcs2geogcs(out_path2, image_path)
|
||||
image_path = out_path2
|
||||
elif len(keyword) == 0 or keyword.strip() == "" or keyword.isspace() is True:
|
||||
raise Exception('image projection is missing!')
|
||||
|
||||
pp.trans_geogcs2projcs(out_path1, image_path) # 坐标投影, 地理转平面投影坐标
|
||||
imageinfo_widthspace = self.ImageHandler.get_geotransform(out_path1)[1] # 投影后的分辨率
|
||||
imageinfo_heightspace = -self.ImageHandler.get_geotransform(out_path1)[5] # 投影后的分辨率
|
||||
para_dict.update({"imageinfo_widthspace":imageinfo_widthspace})
|
||||
para_dict.update({"imageinfo_heightspace":imageinfo_heightspace})
|
||||
para_dict.update({"NominalResolution":imageinfo_widthspace})
|
||||
|
||||
WidthInMeters = imageinfo_width*imageinfo_widthspace # 投影后的分辨率×宽度
|
||||
para_dict.update({"WidthInMeters":WidthInMeters})
|
||||
|
||||
image_array = self.ImageHandler.get_band_array(image_path)
|
||||
a2 = np.where(np.isnan(image_array), 999999, image_array)
|
||||
MinValue = np.min(a2)
|
||||
a3 = np.where(np.isnan(image_array), -999999, image_array)
|
||||
MaxValue = np.max(a3)
|
||||
|
||||
para_dict.update({"MaxValue":MaxValue})
|
||||
para_dict.update({"MinValue":MinValue})
|
||||
|
||||
get_scope = self.ImageHandler.get_scope(image_path)
|
||||
point_upleft, point_upright, point_downleft, point_downright=get_scope[0], get_scope[1], get_scope[2], get_scope[3]
|
||||
para_dict.update({"imageinfo_corner_topLeft_latitude": point_upleft[1]})
|
||||
para_dict.update({"imageinfo_corner_topLeft_longitude": point_upleft[0]})
|
||||
para_dict.update({"imageinfo_corner_topRight_latitude": point_upright[1]})
|
||||
para_dict.update({"imageinfo_corner_topRight_longitude": point_upright[0]})
|
||||
para_dict.update({"imageinfo_corner_bottomLeft_latitude": point_downleft[1]})
|
||||
para_dict.update({"imageinfo_corner_bottomLeft_longitude": point_downleft[0]})
|
||||
para_dict.update({"imageinfo_corner_bottomRight_latitude": point_downright[1]})
|
||||
para_dict.update({"imageinfo_corner_bottomRight_longitude": point_downright[0]})
|
||||
|
||||
longitude_max=np.array([point_upleft[0], point_upright[0], point_downleft[0], point_downright[0]]).max()
|
||||
longitude_min=np.array([point_upleft[0], point_upright[0], point_downleft[0], point_downright[0]]).min()
|
||||
latitude_max=np.array([point_upleft[1], point_upright[1], point_downleft[1], point_downright[1]]).max()
|
||||
latitude_min=np.array([point_upleft[1], point_upright[1], point_downleft[1], point_downright[1]]).min()
|
||||
imageinfo_center_latitude=(latitude_max+latitude_min)/2
|
||||
imageinfo_center_longitude=(longitude_max+longitude_min)/2
|
||||
para_dict.update({"imageinfo_center_latitude": imageinfo_center_latitude})
|
||||
para_dict.update({"imageinfo_center_longitude": imageinfo_center_longitude})
|
||||
|
||||
# self.para_dict.update({"productType": "GTC"}) # 设置产品类型
|
||||
para_dict.update({"productFormat": "TIF"})
|
||||
productGentime = datetime.datetime.now()
|
||||
para_dict.update({"productGentime": productGentime})
|
||||
para_dict.update({"unit": "none"}) # 设置单位
|
||||
para_dict.update({"NoDataValue": "nan"})
|
||||
para_dict.update({"productLevel": "4"}) # 设置图像位深度
|
||||
|
||||
image_array = self.ImageHandler.get_band_array(image_path)
|
||||
try: # 设置图像位深度
|
||||
gdal_dtypes = {
|
||||
'int8': gdal.GDT_Byte,
|
||||
'uint16': gdal.GDT_UInt16,
|
||||
'int16': gdal.GDT_Int16,
|
||||
'uint32': gdal.GDT_UInt32,
|
||||
'int32': gdal.GDT_Int32,
|
||||
'float32': gdal.GDT_Float32,
|
||||
'float64': gdal.GDT_Float64,
|
||||
}
|
||||
bit_dtypes = {
|
||||
'int8': 8,
|
||||
'uint16': 16,
|
||||
'int16': 16,
|
||||
'uint32': 32,
|
||||
'int32': 32,
|
||||
'float32': 32,
|
||||
'float64': 64,
|
||||
}
|
||||
if not gdal_dtypes.get(image_array.dtype.name, None) is None:
|
||||
bit_num = str(bit_dtypes[image_array.dtype.name])
|
||||
datatype=bit_num+"bit"
|
||||
else:
|
||||
datatype = str(32) + "bit"
|
||||
# datatype = str(gdal.GDT_Float32)+"bit"
|
||||
para_dict.update({"imagebit": datatype})
|
||||
except Exception:
|
||||
para_dict.update({"imagebit": "None"})
|
||||
|
||||
HH, HV, VH ,VV= image_pair[0], image_pair[1], image_pair[2], image_pair[3]
|
||||
if HH == 0:
|
||||
HH = "delete"
|
||||
else:
|
||||
HH = "NULL"
|
||||
para_dict.update({"imageinfo_QualifyValue_HH": HH})
|
||||
if HV==0:
|
||||
HV = "delete"
|
||||
else:
|
||||
HV = "NULL"
|
||||
para_dict.update({"imageinfo_QualifyValue_HV": HV})
|
||||
if VH==0:
|
||||
VH = "delete"
|
||||
else:
|
||||
VH = "NULL"
|
||||
para_dict.update({"imageinfo_QualifyValue_VH": VH})
|
||||
if VV==0:
|
||||
VV = "delete"
|
||||
else:
|
||||
VV = "NULL"
|
||||
para_dict.update({"imageinfo_QualifyValue_VV": VV})
|
||||
|
||||
return para_dict
|
||||
|
||||
def calu_dem_nature(self, dem_path, out_dem_path1, out_dem_path2, sampling_f, para_A_arr):
|
||||
"""
|
||||
正射需要单独加上dem影像的信息
|
||||
dem_path:mergedDEM.tif路径
|
||||
out_dem_path1:将mergedDEM.tif由地理坐标转化为平面坐标的保存路径
|
||||
out_dem_path2:将mergedDEM.tif由平面坐标转化为地理坐标的保存路径
|
||||
sampling_f: 采样率
|
||||
para_A_arr:四次多项式模型的参数数组
|
||||
"""
|
||||
para_dict2 = {}
|
||||
proj = self.ImageHandler.get_projection(dem_path) # 输出的影像若是投影坐标系则先转成地理坐标系
|
||||
keyword = proj.split("[", 2)[0] # 若是地理坐标系则pass
|
||||
if keyword == "GEOGCS":
|
||||
pass
|
||||
elif keyword == "PROJCS":
|
||||
pp.trans_projcs2geogcs(out_dem_path2, dem_path)
|
||||
dem_path = out_dem_path2
|
||||
elif len(keyword) == 0 or keyword.strip() == "" or keyword.isspace() is True:
|
||||
raise Exception('image projection is missing!')
|
||||
pp.trans_geogcs2projcs(out_dem_path1, dem_path) # 坐标投影, 地理转平面投影坐标
|
||||
DEM_widthspace = self.ImageHandler.get_geotransform(out_dem_path1)[1] # 投影后的分辨率
|
||||
DEM_heightspace = -self.ImageHandler.get_geotransform(out_dem_path1)[5] # 投影后的分辨率
|
||||
para_dict2.update({"DEM_widthspace":DEM_widthspace})
|
||||
para_dict2.update({"DEM_heightspace":DEM_heightspace})
|
||||
# tree = ElementTree() # 获取DEMProduct:dem产品来源、DEMDate:dem对应的日期
|
||||
# tree.parse(dem_meta) # 影像头文件
|
||||
# root = tree.getroot()
|
||||
# productinfo = root.find("metadata")
|
||||
# DEMProduct = list(productinfo)[0].tag
|
||||
# para_dict2.update({"DEM_DEMProduct":DEMProduct})
|
||||
#
|
||||
# DEMDate = root.find("metadata").find(DEMProduct).text
|
||||
# para_dict2.update({"DEM_DEMDate": DEMDate})
|
||||
get_scope = self.ImageHandler.get_scope(dem_path) # 获取mergedDEM.tif数据的四个角的经纬度信息
|
||||
point_upleft, point_upright, point_downleft, point_downright=get_scope[0], get_scope[1], get_scope[2], get_scope[3]
|
||||
para_dict2.update({"DEM_corner_topLeft_latitude": point_upleft[1]})
|
||||
para_dict2.update({"DEM_corner_topLeft_longitude": point_upleft[0]})
|
||||
para_dict2.update({"DEM_corner_topRight_latitude": point_upright[1]})
|
||||
para_dict2.update({"DEM_corner_topRight_longitude": point_upright[0]})
|
||||
para_dict2.update({"DEM_corner_bottomLeft_latitude": point_downleft[1]})
|
||||
para_dict2.update({"DEM_corner_bottomLeft_longitude": point_downleft[0]})
|
||||
para_dict2.update({"DEM_corner_bottomRight_latitude": point_downright[1]})
|
||||
para_dict2.update({"DEM_corner_bottomRight_longitude": point_downright[0]})
|
||||
#para_dict2.update({"orthoModel_samplingrate": sampling_f})
|
||||
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_X_a0": para_A_arr[0, 0]}) # 获取四次多项式模型6个参数的数值
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_X_a1": para_A_arr[1, 0]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_X_a2": para_A_arr[2, 0]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_X_a3": para_A_arr[3, 0]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_X_a4": para_A_arr[4, 0]})
|
||||
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Y_b0": para_A_arr[0, 1]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Y_b1": para_A_arr[1, 1]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Y_b2": para_A_arr[2, 1]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Y_b3": para_A_arr[3, 1]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Y_b4": para_A_arr[4, 1]})
|
||||
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Z_c0": para_A_arr[0, 2]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Z_c1": para_A_arr[1, 2]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Z_c2": para_A_arr[2, 2]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Z_c3": para_A_arr[3, 2]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Z_c4": para_A_arr[4, 2]})
|
||||
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vx_d0": para_A_arr[0, 3]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vx_d1": para_A_arr[1, 3]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vx_d2": para_A_arr[2, 3]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vx_d3": para_A_arr[3, 3]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vx_d4": para_A_arr[4, 3]})
|
||||
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vy_e0": para_A_arr[0, 4]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vy_e1": para_A_arr[1, 4]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vy_e2": para_A_arr[2, 4]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vy_e3": para_A_arr[3, 4]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vy_e4": para_A_arr[4, 4]})
|
||||
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vz_f0": para_A_arr[0, 5]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vz_f1": para_A_arr[1, 5]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vz_f2": para_A_arr[2, 5]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vz_f3": para_A_arr[3, 5]})
|
||||
para_dict2.update({"satalliteOrbitModel_parameter_Vz_f4": para_A_arr[4, 5]})
|
||||
return para_dict2
|
||||
|
||||
|
||||
class CreateStadardXmlFile:
|
||||
"""读取字典中的属性值,生成一个标准的xml文件"""
|
||||
def __init__(self, xml_path, para_xml_path, par_dict, par_dict2, path):
|
||||
"""
|
||||
xml_path:模板路径
|
||||
para_xml_path:算法配置文件的路径
|
||||
par_dict:字典
|
||||
path:xml模板输出路径
|
||||
"""
|
||||
self.par_dict = par_dict
|
||||
self.par_dict2 = par_dict2
|
||||
self.path = path
|
||||
shutil.copy(xml_path, path)
|
||||
pass
|
||||
|
||||
def create_standard_xml(self):
|
||||
"""将字典中的信息写入到copy的xml文件中"""
|
||||
tree = ElementTree()
|
||||
tree.parse(self.path) # 影像头文件
|
||||
root = tree.getroot()
|
||||
|
||||
productinfo = root.find("productinfo")
|
||||
for key, value in self.par_dict.items():
|
||||
if key.split("_")[0] != "imageinfo":
|
||||
productinfo.find(key).text = str(value)
|
||||
elif key.split("_")[0] == "imageinfo":
|
||||
imageinfo = productinfo.find("imageinfo")
|
||||
if key.split("_")[1] in ["EarthModel", "ProjectModel", "width", "height", "widthspace", "heightspace"]:
|
||||
imageinfo.find(key.split("_")[1]).text = str(value)
|
||||
elif key.split("_")[1] == "center":
|
||||
center = imageinfo.find("center")
|
||||
center.find(key.split("_")[2]).text = str(value)
|
||||
elif key.split("_")[1] == "corner":
|
||||
corner = imageinfo.find("corner")
|
||||
corner.find(key.split("_")[2]).find(key.split("_")[3]).text = str(value)
|
||||
elif key.split("_")[1] == "QualifyValue":
|
||||
QualifyValue = imageinfo.find("QualifyValue")
|
||||
if value =="delete":
|
||||
element_QualifyValue = list(QualifyValue)
|
||||
for i in element_QualifyValue:
|
||||
if i.tag == key.split("_")[2]:
|
||||
QualifyValue.remove(i)
|
||||
else:
|
||||
QualifyValue.find(key.split("_")[2]).text = str(value)
|
||||
pass
|
||||
orthoModel = root.find("processinfo").find("orthoModel") # 写入四次多项式模型
|
||||
for key, value in self.par_dict2.items():
|
||||
if key.split("_")[0] == "satalliteOrbitModel":
|
||||
satalliteOrbitModel = orthoModel.find("satalliteOrbitModel")
|
||||
satalliteOrbitModel.find(key.split("_")[1]).find(key.split("_")[2]).find(key.split("_")[3]).text = str(value)
|
||||
elif key.split("_")[0] == "DEM": # 写入dem四个角坐标
|
||||
DEM= orthoModel.find("DEM")
|
||||
if key.split("_")[1] == "corner":
|
||||
corner = DEM.find("corner")
|
||||
corner.find(key.split("_")[2]).find(key.split("_")[3]).text = str(value)
|
||||
elif key.split("_")[1] == "widthspace" or key.split("_")[1] == "heightspace":
|
||||
DEM.find(key.split("_")[1]).text = str(value)
|
||||
elif key.split("_")[1] == "samplingrate":
|
||||
orthoModel.find(key.split("_")[1]).text = str(value)
|
||||
tree.write(self.path, encoding="utf-8", xml_declaration=True)
|
||||
|
||||
#
|
||||
# if __name__ == '__main__':
|
||||
#
|
||||
# xml_path = "./model_meta.xml"
|
||||
# tem_folder= r"E:\microproduct\测试"
|
||||
# image_path = r"E:\microproduct\测试\GF3_MYN_QPSI_011437_E98_HH_AtmosphericDelay.tif" # 输入影像
|
||||
# out_path = os.path.join(tem_folder, "trans_geo_projcs.tif")
|
||||
# image_pair=[1, 1, 1, 0]
|
||||
# par_dict = CreateDict(image_path, image_pair, out_path).calu_nature() # 计算属性字典
|
||||
#
|
||||
# out_xml_path = os.path.join(tem_folder, "creat_standard.meta.xml") # 输出xml路径
|
||||
# CreateStadardXmlFile(xml_path, par_dict, out_xml_path).create_standard_xml()
|
|
@ -0,0 +1,11 @@
|
|||
# -*- coding: UTF-8 -*-
|
||||
# 定义config分组
|
||||
[config]
|
||||
######1-算法基本参数######
|
||||
productLevel = 3
|
||||
target = Ortho
|
||||
# 算法名称。修改临时工作区生成临时文件的名称,日志名称;
|
||||
exe_name = Ortho
|
||||
# 开启调试模式则不删除临时工作区,True:开启调试,False:不开启调试
|
||||
debug = True
|
||||
|
|
@ -0,0 +1,98 @@
|
|||
SatelliteOribit: # 轨道起算时间
|
||||
StartTime:
|
||||
Value:
|
||||
'2017-09-15 01:55:21.0000'
|
||||
format: # 时间格式
|
||||
"%Y-%m-%d %H:%M:%S.%f"
|
||||
ReferenceSpheroid:
|
||||
Re: # 长半轴
|
||||
6378137
|
||||
Rp: # 短半轴
|
||||
6356752.3142451795
|
||||
we:
|
||||
0.000072292115
|
||||
GPS: # GPS 轨道节点
|
||||
GPSNode_Path:
|
||||
['product','GPS','GPSParam']
|
||||
NodeInfomation_Name: # [时间,x坐标,y坐标,z坐标,x速度,y速度,z速度]
|
||||
['TimeStamp', 'xPosition', 'yPosition', 'zPosition', 'xVelocity', 'yVelocity', 'zVelocity']
|
||||
Time_format:
|
||||
"%Y-%m-%d %H:%M:%S"
|
||||
|
||||
imageinfo: # 影像信息
|
||||
ImageBox:
|
||||
NodePath:
|
||||
['product','imageinfo','corner']
|
||||
NodeName:
|
||||
['topLeft','topRight','bottomLeft','bottomRight']
|
||||
latLon:
|
||||
["latitude","longitude"]
|
||||
ImageWidthSpace:
|
||||
NodePath:
|
||||
['product','imageinfo','widthspace']
|
||||
ImageHeightSpace:
|
||||
NodePath:
|
||||
['product','imageinfo','heightspace']
|
||||
ImageWidth: # 影像宽
|
||||
NodePath:
|
||||
['product','imageinfo','width']
|
||||
ImageHeight: # 影像高
|
||||
NodePath:
|
||||
['product','imageinfo','height']
|
||||
StartImageTime: # 影像开始时间
|
||||
NodePath:
|
||||
['product','imageinfo','imagingTime',start]
|
||||
Format:
|
||||
"%Y-%m-%d %H:%M:%S.%f"
|
||||
EndImageTime: # 影像中止时间
|
||||
NodePath:
|
||||
['product','imageinfo','imagingTime',end]
|
||||
Format:
|
||||
"%Y-%m-%d %H:%M:%S.%f"
|
||||
CenterImageTime: # 中心像元时间
|
||||
NodePath:
|
||||
['product','platform','CenterTime']
|
||||
Format:
|
||||
"%Y-%m-%d %H:%M:%S"
|
||||
CenterImagePositon: # 中心像元,可以作为迭代起算点
|
||||
NodePath:
|
||||
['product','imageinfo','center']
|
||||
Value:
|
||||
['latitude','longitude']
|
||||
NearRange: # 近斜距
|
||||
NodePath:
|
||||
['product','imageinfo','nearRange']
|
||||
DopplerCentroidCoefficients: # 多普勒质心常数
|
||||
NodePath:
|
||||
['product','processinfo','DopplerCentroidCoefficients']
|
||||
DopplerCentroidCoefficients_Name:
|
||||
['d0','d1','d2','d3','d4']
|
||||
DopplerParametersReferenceTime:
|
||||
NodePath:
|
||||
['product','processinfo',"DopplerParametersReferenceTime"]
|
||||
ReferenceRange:
|
||||
NodePath:
|
||||
['product','imageinfo','refRange']
|
||||
incidenceAngle: # 入射角
|
||||
NearRange: # 近入射角
|
||||
NodePath:
|
||||
['product','processinfo','incidenceAngleNearRange']
|
||||
FarRange: # 远入射角
|
||||
NodePath:
|
||||
['product','processinfo','incidenceAngleFarRange']
|
||||
|
||||
sensor:
|
||||
PRF: # 脉冲重复频率
|
||||
NodePath:
|
||||
['product','imageinfo','eqvPRF']
|
||||
bandWidth: # 信号带宽,计算距离向分辨率 用于距离向分辨率
|
||||
NodePath:
|
||||
['product','sensor','waveParams','wave','bandWidth']
|
||||
lambda: # 波长
|
||||
NodePath:
|
||||
['product','sensor','lamda']
|
||||
Fs: # 等效采样频率 eqvFs
|
||||
NodePath:
|
||||
['product','imageinfo','eqvFs']
|
||||
LightSpeed:
|
||||
299792458
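# Illustration only (a minimal sketch, not the project's reader): every NodePath above is the list of
# tag names walked from the root of the product XML. Assuming plain xml.etree, a path such as
# ['product', 'imageinfo', 'width'] could be resolved like this:
#
#   from xml.etree.ElementTree import ElementTree
#   def resolve_node_path(xml_file, node_path):
#       node = ElementTree(file=xml_file).getroot()   # root element, tag 'product'
#       for tag in node_path[1:]:                     # the first entry names the root itself
#           node = node.find(tag)
#       return node.text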
|
|
@ -0,0 +1,330 @@
|
|||
#命名为:geo_rpc.py
|
||||
"""
|
||||
RPC model parsers, localization, and projection
|
||||
"""
|
||||
import numpy as np
|
||||
from osgeo import gdal
|
||||
|
||||
#最大迭代次数超过则报错
|
||||
class MaxLocalizationIterationsError(Exception):
|
||||
"""
|
||||
Custom rpcm Exception.
|
||||
"""
|
||||
pass
|
||||
|
||||
def apply_poly(poly, x, y, z):
|
||||
"""
|
||||
Evaluates a 3-variables polynom of degree 3 on a triplet of numbers.
|
||||
将三次多项式的统一模式构建为一个单独的函数
|
||||
Args:
|
||||
poly: list of the 20 coefficients of the 3-variate degree 3 polynom,
|
||||
ordered following the RPC convention.
|
||||
x, y, z: triplet of floats. They may be numpy arrays of same length.
|
||||
Returns:
|
||||
the value(s) of the polynom on the input point(s).
|
||||
"""
|
||||
out = 0
|
||||
out += poly[0]
|
||||
out += poly[1]*y + poly[2]*x + poly[3]*z
|
||||
out += poly[4]*y*x + poly[5]*y*z +poly[6]*x*z
|
||||
out += poly[7]*y*y + poly[8]*x*x + poly[9]*z*z
|
||||
out += poly[10]*x*y*z
|
||||
out += poly[11]*y*y*y
|
||||
out += poly[12]*y*x*x + poly[13]*y*z*z + poly[14]*y*y*x
|
||||
out += poly[15]*x*x*x
|
||||
out += poly[16]*x*z*z + poly[17]*y*y*z + poly[18]*x*x*z
|
||||
out += poly[19]*z*z*z
|
||||
return out
|
||||
|
||||
def apply_rfm(num, den, x, y, z):
|
||||
"""
|
||||
Evaluates a Rational Function Model (rfm), on a triplet of numbers.
|
||||
执行20个参数的分子和20个参数的除法
|
||||
Args:
|
||||
num: list of the 20 coefficients of the numerator
|
||||
den: list of the 20 coefficients of the denominator
|
||||
All these coefficients are ordered following the RPC convention.
|
||||
x, y, z: triplet of floats. They may be numpy arrays of same length.
|
||||
|
||||
Returns:
|
||||
the value(s) of the rfm on the input point(s).
|
||||
"""
|
||||
return apply_poly(num, x, y, z) / apply_poly(den, x, y, z)
|
||||
|
||||
def rpc_from_geotiff(geotiff_path):
|
||||
"""
|
||||
Read the RPC coefficients from a GeoTIFF file and return an RPCModel object.
|
||||
该函数返回影像的Gdal格式的影像和RPCmodel
|
||||
Args:
|
||||
geotiff_path (str): path or url to a GeoTIFF file
|
||||
|
||||
Returns:
|
||||
instance of the rpc_model.RPCModel class
|
||||
"""
|
||||
# with rasterio.open(geotiff_path, 'r') as src:
|
||||
#
|
||||
dataset = gdal.Open(geotiff_path, gdal.GA_ReadOnly)
|
||||
rpc_dict = dataset.GetMetadata("RPC")
|
||||
# 同时返回影像与rpc
|
||||
return dataset, RPCModel(rpc_dict,'geotiff')
|
||||
|
||||
|
||||
def parse_rpc_file(rpc_file):
|
||||
# rpc_file:.rpc文件的绝对路径
|
||||
# rpc_dict:符号RPC域下的16个关键字的字典
|
||||
# 参考网址:http://geotiff.maptools.org/rpc_prop.html;
|
||||
# https://www.osgeo.cn/gdal/development/rfc/rfc22_rpc.html
|
||||
|
||||
rpc_dict = {}
|
||||
with open(rpc_file) as f:
|
||||
text = f.read()
|
||||
|
||||
# .rpc文件中的RPC关键词
|
||||
words = ['errBias', 'errRand', 'lineOffset', 'sampOffset', 'latOffset',
|
||||
'longOffset', 'heightOffset', 'lineScale', 'sampScale', 'latScale',
|
||||
'longScale', 'heightScale', 'lineNumCoef', 'lineDenCoef','sampNumCoef', 'sampDenCoef',]
|
||||
|
||||
# GDAL库对应的RPC关键词
|
||||
keys = ['ERR_BIAS', 'ERR_RAND', 'LINE_OFF', 'SAMP_OFF', 'LAT_OFF', 'LONG_OFF',
|
||||
'HEIGHT_OFF', 'LINE_SCALE', 'SAMP_SCALE', 'LAT_SCALE',
|
||||
'LONG_SCALE', 'HEIGHT_SCALE', 'LINE_NUM_COEFF', 'LINE_DEN_COEFF',
|
||||
'SAMP_NUM_COEFF', 'SAMP_DEN_COEFF']
|
||||
|
||||
for old, new in zip(words, keys):
|
||||
text = text.replace(old, new)
|
||||
# 以‘;\n’作为分隔符
|
||||
text_list = text.split(';\n')
|
||||
# 删掉无用的行
|
||||
text_list = text_list[3:-2]
|
||||
#
|
||||
text_list[0] = text_list[0].split('\n')[1]
|
||||
# 去掉制表符、换行符、空格
|
||||
text_list = [item.strip('\t').replace('\n', '').replace(' ', '') for item in text_list]
|
||||
|
||||
for item in text_list:
|
||||
# 去掉‘=’
|
||||
key, value = item.split('=')
|
||||
# 去掉多余的括号‘(’,‘)’
|
||||
if '(' in value:
|
||||
value = value.replace('(', '').replace(')', '')
|
||||
rpc_dict[key] = value
|
||||
|
||||
for key in keys[:12]:
|
||||
# 为正数添加符号‘+’
|
||||
if not rpc_dict[key].startswith('-'):
|
||||
rpc_dict[key] = '+' + rpc_dict[key]
|
||||
# 为归一化项和误差标志添加单位
|
||||
if key in ['LAT_OFF', 'LONG_OFF', 'LAT_SCALE', 'LONG_SCALE']:
|
||||
rpc_dict[key] = rpc_dict[key] + ' degrees'
|
||||
if key in ['LINE_OFF', 'SAMP_OFF', 'LINE_SCALE', 'SAMP_SCALE']:
|
||||
rpc_dict[key] = rpc_dict[key] + ' pixels'
|
||||
if key in ['ERR_BIAS', 'ERR_RAND', 'HEIGHT_OFF', 'HEIGHT_SCALE']:
|
||||
rpc_dict[key] = rpc_dict[key] + ' meters'
|
||||
|
||||
# 处理有理函数项
|
||||
for key in keys[-4:]:
|
||||
values = []
|
||||
for item in rpc_dict[key].split(','):
|
||||
#print(item)
|
||||
if not item.startswith('-'):
|
||||
values.append('+'+item)
|
||||
else:
|
||||
values.append(item)
|
||||
rpc_dict[key] = ' '.join(values)
|
||||
return rpc_dict
|
||||
|
||||
|
||||
def read_rpc_file(rpc_file):
|
||||
"""
|
||||
Read RPC from a RPC_txt file and return a RPCmodel
|
||||
从TXT中直接单独读取RPC模型
|
||||
Args:
|
||||
rpc_file: RPC sidecar file path
|
||||
|
||||
Returns:
|
||||
dictionary read from the RPC file, or an empty dict if fail
|
||||
|
||||
"""
|
||||
rpc = parse_rpc_file(rpc_file)
|
||||
return RPCModel(rpc)
|
||||
|
||||
class RPCModel:
|
||||
def __init__(self, d, dict_format="geotiff"):
|
||||
"""
|
||||
Args:
|
||||
d (dict): dictionary read from a geotiff file with
|
||||
rasterio.open('/path/to/file.tiff', 'r').tags(ns='RPC'),
|
||||
or from the .__dict__ of an RPCModel object.
|
||||
dict_format (str): format of the dictionary passed in `d`.
|
||||
Either "geotiff" if read from the tags of a geotiff file,
|
||||
or "rpcm" if read from the .__dict__ of an RPCModel object.
|
||||
"""
|
||||
if dict_format == "geotiff":
|
||||
self.row_offset = float(d['LINE_OFF'][0:d['LINE_OFF'].rfind(' ')])
|
||||
self.col_offset = float(d['SAMP_OFF'][0:d['SAMP_OFF'].rfind(' ')])
|
||||
self.lat_offset = float(d['LAT_OFF'][0:d['LAT_OFF'].rfind(' ')])
|
||||
self.lon_offset = float(d['LONG_OFF'][0:d['LONG_OFF'].rfind(' ')])
|
||||
self.alt_offset = float(d['HEIGHT_OFF'][0:d['HEIGHT_OFF'].rfind(' ')])
|
||||
|
||||
self.row_scale = float(d['LINE_SCALE'][0:d['LINE_SCALE'].rfind(' ')])
|
||||
self.col_scale = float(d['SAMP_SCALE'][0:d['SAMP_SCALE'].rfind(' ')])
|
||||
self.lat_scale = float(d['LAT_SCALE'][0:d['LAT_SCALE'].rfind(' ')])
|
||||
self.lon_scale = float(d['LONG_SCALE'][0:d['LONG_SCALE'].rfind(' ')])
|
||||
self.alt_scale = float(d['HEIGHT_SCALE'][0:d['HEIGHT_SCALE'].rfind(' ')])
|
||||
|
||||
self.row_num = list(map(float, d['LINE_NUM_COEFF'].split()))
|
||||
self.row_den = list(map(float, d['LINE_DEN_COEFF'].split()))
|
||||
self.col_num = list(map(float, d['SAMP_NUM_COEFF'].split()))
|
||||
self.col_den = list(map(float, d['SAMP_DEN_COEFF'].split()))
|
||||
|
||||
if 'LON_NUM_COEFF' in d:
|
||||
self.lon_num = list(map(float, d['LON_NUM_COEFF'].split()))
|
||||
self.lon_den = list(map(float, d['LON_DEN_COEFF'].split()))
|
||||
self.lat_num = list(map(float, d['LAT_NUM_COEFF'].split()))
|
||||
self.lat_den = list(map(float, d['LAT_DEN_COEFF'].split()))
|
||||
|
||||
elif dict_format == "rpcm":
|
||||
self.__dict__ = d
|
||||
|
||||
else:
|
||||
raise ValueError(
|
||||
"dict_format '{}' not supported. "
|
||||
"Should be {{'geotiff','rpcm'}}".format(dict_format)
|
||||
)
|
||||
|
||||
|
||||
def projection(self, lon, lat, alt):
|
||||
"""
|
||||
Convert geographic coordinates of 3D points into image coordinates.
|
||||
正投影:从地理坐标到图像坐标
|
||||
Args:
|
||||
lon (float or list): longitude(s) of the input 3D point(s)
|
||||
lat (float or list): latitude(s) of the input 3D point(s)
|
||||
alt (float or list): altitude(s) of the input 3D point(s)
|
||||
|
||||
Returns:
|
||||
float or list: horizontal image coordinate(s) (column index, ie x)
|
||||
float or list: vertical image coordinate(s) (row index, ie y)
|
||||
"""
|
||||
nlon = (np.asarray(lon) - self.lon_offset) / self.lon_scale
|
||||
nlat = (np.asarray(lat) - self.lat_offset) / self.lat_scale
|
||||
nalt = (np.asarray(alt) - self.alt_offset) / self.alt_scale
|
||||
|
||||
col = apply_rfm(self.col_num, self.col_den, nlat, nlon, nalt)
|
||||
row = apply_rfm(self.row_num, self.row_den, nlat, nlon, nalt)
|
||||
|
||||
col = col * self.col_scale + self.col_offset
|
||||
row = row * self.row_scale + self.row_offset
|
||||
|
||||
return col, row
|
||||
|
||||
|
||||
def localization(self, col, row, alt, return_normalized=False):
|
||||
"""
|
||||
Convert image coordinates plus altitude into geographic coordinates.
|
||||
反投影:从图像坐标到地理坐标
|
||||
Args:
|
||||
col (float or list): x image coordinate(s) of the input point(s)
|
||||
row (float or list): y image coordinate(s) of the input point(s)
|
||||
alt (float or list): altitude(s) of the input point(s)
|
||||
|
||||
Returns:
|
||||
float or list: longitude(s)
|
||||
float or list: latitude(s)
|
||||
"""
|
||||
ncol = (np.asarray(col) - self.col_offset) / self.col_scale
|
||||
nrow = (np.asarray(row) - self.row_offset) / self.row_scale
|
||||
nalt = (np.asarray(alt) - self.alt_offset) / self.alt_scale
|
||||
|
||||
if not hasattr(self, 'lat_num'):
|
||||
lon, lat = self.localization_iterative(ncol, nrow, nalt)
|
||||
else:
|
||||
lon = apply_rfm(self.lon_num, self.lon_den, nrow, ncol, nalt)
|
||||
lat = apply_rfm(self.lat_num, self.lat_den, nrow, ncol, nalt)
|
||||
|
||||
if not return_normalized:
|
||||
lon = lon * self.lon_scale + self.lon_offset
|
||||
lat = lat * self.lat_scale + self.lat_offset
|
||||
|
||||
return lon, lat
|
||||
|
||||
|
||||
def localization_iterative(self, col, row, alt):
|
||||
"""
|
||||
Iterative estimation of the localization function (image to ground),
|
||||
for a list of image points expressed in image coordinates.
|
||||
逆投影时的迭代函数
|
||||
Args:
|
||||
col, row: normalized image coordinates (between -1 and 1)
|
||||
alt: normalized altitude (between -1 and 1) of the corresponding 3D
|
||||
point
|
||||
|
||||
Returns:
|
||||
lon, lat: normalized longitude and latitude
|
||||
|
||||
Raises:
|
||||
MaxLocalizationIterationsError: if the while loop exceeds the max
|
||||
number of iterations, which is set to 100.
|
||||
"""
|
||||
# target point: Xf (f for final)
|
||||
Xf = np.vstack([col, row]).T
|
||||
|
||||
# use 3 corners of the lon, lat domain and project them into the image
|
||||
# to get the first estimation of (lon, lat)
|
||||
# EPS is 2 for the first iteration, then 0.1.
|
||||
lon = -col ** 0 # vector of ones
|
||||
lat = -col ** 0
|
||||
EPS = 2
|
||||
x0 = apply_rfm(self.col_num, self.col_den, lat, lon, alt)
|
||||
y0 = apply_rfm(self.row_num, self.row_den, lat, lon, alt)
|
||||
x1 = apply_rfm(self.col_num, self.col_den, lat, lon + EPS, alt)
|
||||
y1 = apply_rfm(self.row_num, self.row_den, lat, lon + EPS, alt)
|
||||
x2 = apply_rfm(self.col_num, self.col_den, lat + EPS, lon, alt)
|
||||
y2 = apply_rfm(self.row_num, self.row_den, lat + EPS, lon, alt)
|
||||
|
||||
n = 0
|
||||
while not np.all((x0 - col) ** 2 + (y0 - row) ** 2 < 1e-18):
|
||||
|
||||
if n > 100:
|
||||
raise MaxLocalizationIterationsError("Max localization iterations (100) exceeded")
|
||||
|
||||
X0 = np.vstack([x0, y0]).T
|
||||
X1 = np.vstack([x1, y1]).T
|
||||
X2 = np.vstack([x2, y2]).T
|
||||
e1 = X1 - X0
|
||||
e2 = X2 - X0
|
||||
u = Xf - X0
|
||||
|
||||
# project u on the base (e1, e2): u = a1*e1 + a2*e2
|
||||
# the exact computation is given by:
|
||||
# M = np.vstack((e1, e2)).T
|
||||
# a = np.dot(np.linalg.inv(M), u)
|
||||
# but I don't know how to vectorize this.
|
||||
# Assuming that e1 and e2 are orthogonal, a1 is given by
|
||||
# <u, e1> / <e1, e1>
|
||||
num = np.sum(np.multiply(u, e1), axis=1)
|
||||
den = np.sum(np.multiply(e1, e1), axis=1)
|
||||
a1 = np.divide(num, den).squeeze()
|
||||
|
||||
num = np.sum(np.multiply(u, e2), axis=1)
|
||||
den = np.sum(np.multiply(e2, e2), axis=1)
|
||||
a2 = np.divide(num, den).squeeze()
|
||||
|
||||
# use the coefficients a1, a2 to compute an approximation of the
|
||||
# point on the gound which in turn will give us the new X0
|
||||
lon += a1 * EPS
|
||||
lat += a2 * EPS
|
||||
|
||||
# update X0, X1 and X2
|
||||
EPS = .1
|
||||
x0 = apply_rfm(self.col_num, self.col_den, lat, lon, alt)
|
||||
y0 = apply_rfm(self.row_num, self.row_den, lat, lon, alt)
|
||||
x1 = apply_rfm(self.col_num, self.col_den, lat, lon + EPS, alt)
|
||||
y1 = apply_rfm(self.row_num, self.row_den, lat, lon + EPS, alt)
|
||||
x2 = apply_rfm(self.col_num, self.col_den, lat + EPS, lon, alt)
|
||||
y2 = apply_rfm(self.row_num, self.row_den, lat + EPS, lon, alt)
|
||||
|
||||
n += 1
|
||||
|
||||
return lon, lat
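# Usage sketch (illustration only): the GeoTIFF name below is hypothetical and must carry RPC metadata
# in its "RPC" domain. It exercises the projection/localization round trip defined above; with a valid
# RPC the recovered lon/lat should match the input to within the model's accuracy.
if __name__ == '__main__':
    dataset, rpc = rpc_from_geotiff('scene_with_rpc.tiff')   # hypothetical file name
    col, row = rpc.projection(120.8, 30.2, 0.0)              # ground -> image (lon, lat, height)
    lon, lat = rpc.localization(col, row, 0.0)               # image -> ground at the same height
    print(col, row, lon, lat)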
|
||||
|
|
@ -0,0 +1,133 @@
|
|||
|
||||
<product>
|
||||
<productinfo>
|
||||
<NominalResolution desc="分辨率"> </NominalResolution>
|
||||
<WidthInMeters> </WidthInMeters>
|
||||
<productLevel desc="产品等级"> </productLevel>
|
||||
<productType>GEC</productType>
|
||||
<productFormat>TIF</productFormat>
|
||||
<productGentime> </productGentime>
|
||||
<unit desc="单位"> </unit>
|
||||
<MinValue desc="最小值"> </MinValue>
|
||||
<MaxValue desc="最大值"> </MaxValue>
|
||||
<NoDataValue desc="无数据值">nan</NoDataValue>
|
||||
<datastructure desc="数据组织方式">BSQ</datastructure>
|
||||
<imagebit> </imagebit>
|
||||
<imageinfo desc="产品影像信息">
|
||||
<EarthModel> </EarthModel>
|
||||
<ProjectModel> </ProjectModel>
|
||||
<center>
|
||||
<latitude unit="degree"> </latitude>
|
||||
<longitude unit="degree"> </longitude>
|
||||
</center>
|
||||
<corner>
|
||||
<topLeft>
|
||||
<latitude unit="degree"> </latitude>
|
||||
<longitude unit="degree"> </longitude>
|
||||
</topLeft>
|
||||
<topRight>
|
||||
<latitude unit="degree"> </latitude>
|
||||
<longitude unit="degree"> </longitude>
|
||||
</topRight>
|
||||
<bottomLeft>
|
||||
<latitude unit="degree"> </latitude>
|
||||
<longitude unit="degree"> </longitude>
|
||||
</bottomLeft>
|
||||
<bottomRight>
|
||||
<latitude unit="degree"> </latitude>
|
||||
<longitude unit="degree"> </longitude>
|
||||
</bottomRight>
|
||||
</corner>
|
||||
<width> </width>
|
||||
<height> </height>
|
||||
<widthspace desc="宽度分辨率"> </widthspace>
|
||||
<heightspace desc="高度分辨率"> </heightspace>
|
||||
<QualifyValue desc="质量标识">
|
||||
<HH> </HH>
|
||||
<HV> </HV>
|
||||
<VH> </VH>
|
||||
<VV> </VV>
|
||||
</QualifyValue>
|
||||
</imageinfo>
|
||||
</productinfo>
|
||||
<processinfo desc="算法处理信息">
|
||||
<orthoModel desc="正射校正方法">
|
||||
<orthoModelName>模拟影像法</orthoModelName>
|
||||
<simulationModel>常数累加法</simulationModel>
|
||||
<satalliteOrbitModel desc="卫星轨道模型">
|
||||
<ModelName>四次多项式模型</ModelName>
|
||||
<EarthModel>WGS84</EarthModel>
|
||||
<parameter desc="四次多项式模型">
|
||||
<X>
|
||||
<a0> </a0>
|
||||
<a1> </a1>
|
||||
<a2> </a2>
|
||||
<a3> </a3>
|
||||
<a4> </a4>
|
||||
</X>
|
||||
<Y>
|
||||
<b0> </b0>
|
||||
<b1> </b1>
|
||||
<b2> </b2>
|
||||
<b3> </b3>
|
||||
<b4> </b4>
|
||||
</Y>
|
||||
<Z>
|
||||
<c0> </c0>
|
||||
<c1> </c1>
|
||||
<c2> </c2>
|
||||
<c3> </c3>
|
||||
<c4> </c4>
|
||||
</Z>
|
||||
<Vx>
|
||||
<d0> </d0>
|
||||
<d1> </d1>
|
||||
<d2> </d2>
|
||||
<d3> </d3>
|
||||
<d4> </d4>
|
||||
</Vx>
|
||||
<Vy>
|
||||
<e0> </e0>
|
||||
<e1> </e1>
|
||||
<e2> </e2>
|
||||
<e3> </e3>
|
||||
<e4> </e4>
|
||||
</Vy>
|
||||
<Vz>
|
||||
<f0> </f0>
|
||||
<f1> </f1>
|
||||
<f2> </f2>
|
||||
<f3> </f3>
|
||||
<f4> </f4>
|
||||
</Vz>
|
||||
</parameter>
|
||||
</satalliteOrbitModel>
|
||||
<DEM desc="DEM的参数">
|
||||
<DEMProduct desc="DEM产品的来源">NULL</DEMProduct>
|
||||
<DEMDate desc="DEM对应的时间">NULL</DEMDate>
|
||||
<heightspace desc="行分辨率"> </heightspace>
|
||||
<widthspace desc="列分辨率"> </widthspace>
|
||||
<corner desc="DEM的范围">
|
||||
<topLeft>
|
||||
<latitude> </latitude>
|
||||
<longitude> </longitude>
|
||||
</topLeft>
|
||||
<topRight>
|
||||
<latitude> </latitude>
|
||||
<longitude> </longitude>
|
||||
</topRight>
|
||||
<bottomLeft>
|
||||
<latitude> </latitude>
|
||||
<longitude> </longitude>
|
||||
</bottomLeft>
|
||||
<bottomRight>
|
||||
<latitude> </latitude>
|
||||
<longitude> </longitude>
|
||||
</bottomRight>
|
||||
</corner>
|
||||
</DEM>
|
||||
<samplingrate desc="DEM的重采样率"> </samplingrate>
|
||||
<MatchModel desc="匹配方法">标准相关匹配</MatchModel>
|
||||
</orthoModel>
|
||||
</processinfo>
|
||||
</product>
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,89 @@
|
|||
# -*- mode: python ; coding: utf-8 -*-
|
||||
import sys
|
||||
import shutil
|
||||
import os
|
||||
import tarfile
|
||||
|
||||
#文件夹遍历深度
|
||||
sys.setrecursionlimit(5000)
|
||||
block_cipher = None
|
||||
|
||||
#######begin-打包前处理##############
|
||||
#将上一级的tool文件夹替换到当前路径下的tool
|
||||
# 测试代码
|
||||
cwdpath = os.getcwd()
|
||||
tool_path = ''
|
||||
src = '../tool'
|
||||
des = os.path.join(cwdpath, "tool")
|
||||
targz_path = os.path.join(cwdpath, "tool.tar.gz")
|
||||
#创建文件夹
|
||||
if os.path.exists(des):
|
||||
if os.path.isdir(des):
|
||||
shutil.rmtree(des)
|
||||
os.makedirs(des)
|
||||
#压缩
|
||||
dir = os.path.split(targz_path )[0]
|
||||
if os.path.exists(dir) is False:
|
||||
os.makedirs(dir)
|
||||
with tarfile.open(targz_path, "w:gz") as tar:
|
||||
tar.add(src, arcname=os.path.basename(src))
|
||||
#解压
|
||||
t = tarfile.open(targz_path)
|
||||
t.extractall(path=cwdpath)
|
||||
#删除临时压缩包
|
||||
#os.remove(targz_path)
|
||||
|
||||
#生成名称
|
||||
main_name = ''
|
||||
for name in os.listdir(cwdpath):
|
||||
if 'Main.py' in name:
|
||||
main_name = name
|
||||
exe_name = main_name.split('.')[0][:-4] + '-C-SAR-V2.0'
|
||||
#######end-打包前处理##############
|
||||
|
||||
|
||||
|
||||
#######beging-pyinstaller打包参数##############
|
||||
|
||||
a = Analysis(['OrthoMain.py',
|
||||
'./tool/algorithm/algtools/ScatteringAuxData.py',
|
||||
'./tool/algorithm/algtools/CoordinateTransformation.py',
|
||||
'./tool/algorithm/algtools/DEMJoint.py',
|
||||
'./tool/algorithm/algtools/logHandler.py',
|
||||
'./tool/algorithm/algtools/PreProcess.py',
|
||||
'./tool/algorithm/algtools/RieveFilter.py',
|
||||
'./tool/algorithm/algtools/ROIAlg.py',
|
||||
'./tool/algorithm/block/blockprocess.py',
|
||||
'./tool/algorithm/image/ImageHandle.py',
|
||||
'./tool/algorithm/xml/AlgXmlHandle.py',
|
||||
'./tool/algorithm/xml/CreatMetafile.py',
|
||||
'./tool/config/ConfigeHandle.py',
|
||||
'./tool/logs/logHandler.py',],
|
||||
pathex=[cwdpath],
|
||||
binaries=[],
|
||||
datas=[],
|
||||
hiddenimports=[],
|
||||
hookspath=[],
|
||||
runtime_hooks=[],
|
||||
excludes=[],
|
||||
win_no_prefer_redirects=False,
|
||||
win_private_assemblies=False,
|
||||
cipher=block_cipher,
|
||||
noarchive=False)
|
||||
pyz = PYZ(a.pure, a.zipped_data,
|
||||
cipher=block_cipher)
|
||||
exe = EXE(pyz,
|
||||
a.scripts,
|
||||
a.binaries,
|
||||
a.zipfiles,
|
||||
a.datas,
|
||||
[],
|
||||
name= 'Ortho-C-SAR-V2.0',
|
||||
debug=False,
|
||||
bootloader_ignore_signals=False,
|
||||
strip=False,
|
||||
upx=True,
|
||||
upx_exclude=[],
|
||||
runtime_tmpdir=None,
|
||||
console=True )
|
||||
######beging-pyinstaller打包参数#################
|
|
@ -0,0 +1,60 @@
|
|||
<Root>
|
||||
<ProductBasicInfo>
|
||||
<ProductName>正射校正</ProductName>
|
||||
<ProductIdentifier>Ortho</ProductIdentifier>
|
||||
<ProductLevel>LEVEL3</ProductLevel>
|
||||
<ProductResolution> </ProductResolution>
|
||||
<ProductDate> </ProductDate>
|
||||
<ProductFormat> </ProductFormat>
|
||||
<CompressionMethod> </CompressionMethod>
|
||||
<ProductSize> </ProductSize>
|
||||
<SpatialCoverageInformation>
|
||||
<TopLeftLatitude> </TopLeftLatitude>
|
||||
<TopLeftLongitude> </TopLeftLongitude>
|
||||
<TopRightLatitude> </TopRightLatitude>
|
||||
<TopRightLongitude> </TopRightLongitude>
|
||||
<BottomRightLatitude> </BottomRightLatitude>
|
||||
<BottomRightLongitude> </BottomRightLongitude>
|
||||
<BottomLeftLatitude> </BottomLeftLatitude>
|
||||
<BottomLeftLongitude> </BottomLeftLongitude>
|
||||
<CenterLatitude> </CenterLatitude>
|
||||
<CenterLongitude> </CenterLongitude>
|
||||
</SpatialCoverageInformation>
|
||||
<TimeCoverageInformation>
|
||||
<StartTime> </StartTime>
|
||||
<EndTime> </EndTime>
|
||||
<CenterTime> </CenterTime>
|
||||
</TimeCoverageInformation>
|
||||
<CoordinateReferenceSystemInformation>
|
||||
<MapProjection> </MapProjection>
|
||||
<EarthEllipsoid> </EarthEllipsoid>
|
||||
<ZoneNo> </ZoneNo>
|
||||
</CoordinateReferenceSystemInformation>
|
||||
<MetaInfo>
|
||||
<Unit> </Unit>
|
||||
<UnitDes> </UnitDes>
|
||||
</MetaInfo>
|
||||
</ProductBasicInfo>
|
||||
<ProductProductionInfo>
|
||||
<DataSources number="1">
|
||||
<DataSource>
|
||||
<Satellite> </Satellite>
|
||||
<Sensor> </Sensor>
|
||||
</DataSource>
|
||||
</DataSources>
|
||||
<ObservationGeometry>
|
||||
<SatelliteAzimuth> </SatelliteAzimuth>
|
||||
<SatelliteRange> </SatelliteRange>
|
||||
</ObservationGeometry>
|
||||
<BandSelection>1</BandSelection>
|
||||
<DataSourceDescription>None</DataSourceDescription>
|
||||
<DataSourceProcessingDescription>参考产品介绍PDF</DataSourceProcessingDescription>
|
||||
<ProductionDate> </ProductionDate>
|
||||
<AuxiliaryDataDescription> </AuxiliaryDataDescription>
|
||||
</ProductProductionInfo>
|
||||
<ProductPublishInfo>
|
||||
<Processor>德清</Processor>
|
||||
<DistributionUnit> </DistributionUnit>
|
||||
<ContactInformation> </ContactInformation>
|
||||
</ProductPublishInfo>
|
||||
</Root>
|
Binary file not shown.
|
@ -0,0 +1,47 @@
|
|||
''' 去地平效应功能代码
|
||||
因为去地平效应是需要像元的成像时间作为的输入参数量,因此需要原始影像的SLC作为输出项,或者原始的SLC中,保留了影像的成像时刻。
|
||||
作为模型约定:
|
||||
1.模型的输入需要已经经过了正射校正,否则没有办法与已有的DEM进行匹配。
|
||||
2.开发示例使用的高分三号,注意高分三号的正射模型为rpc,因此需要需要计算的对应的时间。
|
||||
'''
|
||||
import os
|
||||
import numpy as np
|
||||
# import gdal
|
||||
import math
|
||||
|
||||
class DeLevelingEffect:
|
||||
''' 去地平效应
|
||||
计算公式:
|
||||
Phi=(4*pi/lambda_)*(r1-r2)
|
||||
lambda_ 波长
|
||||
pi=3.14159265358979323846264338327950288
|
||||
r1:主
|
||||
r2:辅
|
||||
'''
|
||||
@staticmethod
def LevelingEffect(lamda_, r1, r2, pi=3.14159265358979323846264338327950288):
|
||||
'''计算地平效应的相位
|
||||
agrs:
|
||||
lamda_: double, 波长
|
||||
r1:shape nx1 ,主影像的斜距
|
||||
r2:shape nx1 ,辅影像的斜距
|
||||
pi:double,圆周率
|
||||
return:
|
||||
Phi:shape nx1
|
||||
'''
|
||||
Phi=(4*pi/lamda_)*(r1-r2)
|
||||
return Phi
|
||||
|
||||
@staticmethod
def CalSlantDistance(Rs_Salatellite, Rs_SeaLevel):
|
||||
'''计算斜率
|
||||
agrs:
|
||||
Rs_Salatellite:nx3 x,y,z 地面对应时刻的 卫星位置和速度
|
||||
Rs_SeaLevel:nx3 x,y,z 地面的坐标
|
||||
return:
|
||||
SlantDistance:nx1 斜距
|
||||
'''
|
||||
SlantDistance=np.sqrt(np.sum((Rs_Salatellite-Rs_SeaLevel)**2,axis=1)).reshape(-1,1)
|
||||
return SlantDistance
|
||||
|
||||
# 根据根据卫星轨道数据,构建合适的影像信息
|
||||
|
||||
pass
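# Usage sketch (illustration only, with made-up coordinates): compute the flat-earth phase for two
# hypothetical acquisitions, assuming a C-band wavelength of about 0.056 m. The methods are used as
# static helpers (no instance state).
if __name__ == '__main__':
    sat_pos = np.array([[7.0e6, 0.0, 0.0], [7.0e6, 1.0e3, 0.0]])   # satellite positions (m)
    ground = np.array([[6.4e6, 0.0, 0.0], [6.4e6, 0.0, 0.0]])      # sea-level ground points (m)
    r1 = DeLevelingEffect.CalSlantDistance(sat_pos, ground)        # master slant ranges, shape (2, 1)
    r2 = r1 + 0.01                                                 # assume the slave range is 1 cm longer
    phi = DeLevelingEffect.LevelingEffect(0.056, r1, r2)           # flat-earth phase in radians
    print(phi)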
|
File diff suppressed because it is too large
|
@ -0,0 +1,24 @@
|
|||
"""
|
||||
@Project :microproduct
|
||||
@File :OnePlantHeight.PY
|
||||
@Function :主函数
|
||||
@Author :LMM
|
||||
@Date :2021/10/19 14:39
|
||||
@Version :1.0.0
|
||||
"""
|
||||
|
||||
import numpy as np
|
||||
from scipy.special import lambertw
|
||||
H_arr=np.array([1,2])
|
||||
array_angle=np.array([1,0.5])
|
||||
a = 0.5 * H_arr * H_arr
|
||||
b = -0.5 * np.sin(array_angle)
|
||||
y = 2
|
||||
|
||||
|
||||
|
||||
a1 = 2 * lambertw(-b * np.sqrt(y / a) / 2) / b
|
||||
print(a1)
|
||||
pass
|
||||
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
import os
|
||||
import sys
|
||||
import cv2
|
||||
import numpy as np
|
||||
|
||||
img1 = cv2.imread(r'D:\MicroSAR\C-SAR\Ortho\Ortho\Temporary\dem_rcs.jpg', 0)
|
||||
img2 = cv2.imread(r'D:\MicroSAR\C-SAR\Ortho\Ortho\Temporary\sar_rcs.jpg', 0)
|
||||
def cv_show(name,img):
|
||||
cv2.imshow(name, img)
|
||||
cv2.waitKey(0)
|
||||
cv2.destroyAllWindows()
|
||||
sift = cv2.xfeatures2d.SIFT_create()
|
||||
kp1, des1 = sift.detectAndCompute(img1, None)
|
||||
kp2, des2 = sift.detectAndCompute(img2, None)
|
||||
bf = cv2.BFMatcher(crossCheck=True)
|
||||
matches = bf.match(des1, des2)
|
||||
matches = sorted(matches, key=lambda x: x.distance)
|
||||
img3 = cv2.drawMatches(img1, kp1, img2, kp2, matches[:10], None,flags=2)
|
||||
cv_show('img3',img3)
|
|
@ -0,0 +1,30 @@
|
|||
|
||||
def cal(base_block,i,j):
|
||||
|
||||
std=np.std(base_block)
|
||||
dist=np.max(base_block)-np.min(base_block)
|
||||
return [i,j,std,dist]
|
||||
|
||||
|
||||
plist=[]
|
||||
with Pool() as pool:
|
||||
for i in range(height):
|
||||
print("\r",i,"/",height," ",end="")
|
||||
if i<50 or i>height-51:
|
||||
continue
|
||||
for j in range(width):
|
||||
if j<50 or j>width-51:
|
||||
continue
|
||||
base_block=data[i-50:i+50,j-50:j+50]
|
||||
plist.append(pool.apply_async(cal, args=(base_block, i, j,)))
|
||||
pool.close()
|
||||
pool.join()
|
||||
for p in plist:
|
||||
result=p.get()
|
||||
[i,j,std,dist]=result
|
||||
base_std[i,j]=std
|
||||
base_dist[i,j]=dist
|
||||
|
||||
base_std.astype(np.float32).tofile(r"D:\MicroSAR\C-SAR\Ortho\Ortho\Temporary\HH_std.bin")
|
||||
base_dist.astype(np.float32).tofile(r"D:\MicroSAR\C-SAR\Ortho\Ortho\Temporary\HH_dist.bin")
|
||||
print(base_dist.shape,base_dist.dtype)
|
|
@ -0,0 +1,113 @@
|
|||
import logging
|
||||
# from re import S
|
||||
# from oneOrthoAuxData import OrthoAuxData
|
||||
# from OrthoImage import ImageHandler
|
||||
from tool.algorithm.image.ImageHandle import ImageHandler
|
||||
import tarfile
|
||||
# from OrthoDB import ManageAlgXML, CheckSource
|
||||
from tool.algorithm.xml.AlgXmlHandle import ManageAlgXML, CheckSource # 导入xml文件读取与检查文件
|
||||
from OrthoAlg import IndirectOrthorectification, DEMProcess,ImageMatchClass
|
||||
# from logHandler import LogHandler
|
||||
from tool.algorithm.algtools.logHandler import LogHandler
|
||||
from tool.algorithm.xml.CreatMetafile import CreateMetafile
|
||||
from OrthoXmlInfo import CreateDict, CreateStadardXmlFile
|
||||
from osgeo import gdal, osr
|
||||
import os
|
||||
import glob
|
||||
import gc
|
||||
import datetime
|
||||
import shutil
|
||||
import sys
|
||||
import cv2
|
||||
|
||||
ori_sar_path="D:\MicroWorkspace\C-SAR\Ortho\Temporary\TestSAR\GF3_SAY_QPSI_013952_E118.9_N31.5_20190404_L1A_HH_L10003923848.jpg"
|
||||
sim_sar_path="D:\MicroWorkspace\C-SAR\Ortho\Temporary\TestSim\sim_img_sum.jpg"
|
||||
work_sapce_path="D:\MicroWorkspace\C-SAR\Ortho\Temporary"
|
||||
|
||||
'''
|
||||
import matplotlib.pyplot as plt
|
||||
img1 = cv2.imread(ori_sar_path, 0)
|
||||
img2 = cv2.imread(sim_sar_path, 0)
|
||||
def cv_show(name,img):
|
||||
cv2.imshow(name, img)
|
||||
cv2.waitKey(0)
|
||||
cv2.destroyAllWindows()
|
||||
sift = cv2.SIFT_create()
|
||||
kp1, des1 = sift.detectAndCompute(img1, None)
|
||||
kp2, des2 = sift.detectAndCompute(img2, None)
|
||||
bf = cv2.BFMatcher(crossCheck=True)
|
||||
matches = bf.match(des1, des2)
|
||||
matches = sorted(matches, key=lambda x: x.distance)
|
||||
img3 = cv2.drawMatches(img1, kp1, img2, kp2, matches[:10], None,flags=2)
|
||||
cv2.imwrite(work_sapce_path,img3)
|
||||
'''
|
||||
|
||||
# 匹配影像
|
||||
def ImageMatch(ori_sar_path,sim_sar_path,work_sapce_path):
|
||||
ori_sar=gdal.Open(ori_sar_path)
|
||||
sim_sar=gdal.Open(sim_sar_path)
|
||||
# 影像尺寸
|
||||
ori_height=ori_sar.RasterYSize
|
||||
ori_width=ori_sar.RasterXSize
|
||||
sim_height=sim_sar.RasterYSize
|
||||
sim_width=sim_sar.RasterXSize
|
||||
# 分块匹配
|
||||
ori_sar_arr=ori_sar.GetRasterBand(1).ReadAsArray(0,0,ori_width,ori_height) # 原始影像
|
||||
ori_img=(255*ori_sar_arr/np.max(ori_sar_arr)).astype(np.uint8)
|
||||
|
||||
sim_sar_arr=np.log(sim_sar.GetRasterBand(1).ReadAsArray(0,0,sim_width,sim_height)+1) # 模拟影像
|
||||
sim_img=(1+253*sim_sar_arr/np.max(sim_sar_arr)).astype(np.uint8)
|
||||
|
||||
res = cv.matchTemplate(img,template,method)
|
||||
min_val, max_val, min_loc, max_loc = cv.minMaxLoc(res)
|
||||
top_left = max_loc
|
||||
# 范围
|
||||
min_w=top_left[0] if top_left[0]>0 else 0
|
||||
min_h=top_left[1] if top_left[1]>0 else 0
|
||||
|
||||
max_w=top_left[0]+ori_width if top_left[0]+ori_width<sim_width else sim_width
|
||||
max_h=top_left[1]+ori_height if top_left[0]+ori_height<sim_height else sim_height
|
||||
# 裁剪
|
||||
sim_clip=sim_img[min_h:max_h,min_w:max_w]
|
||||
|
||||
for i in range(0,ori_img.shape[0],10):
|
||||
for j in range(0,ori_img.shape[1],10):
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
pass
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import cv2 as cv
|
||||
import numpy as np
|
||||
from matplotlib import pyplot as plt
|
||||
img = cv.imread(sim_sar_path,0)
|
||||
img2 = img.copy()
|
||||
template = cv.imread(ori_sar_path,0)
|
||||
w, h = template.shape[::-1]
|
||||
# 列表中所有的6种比较方法
|
||||
methods = ['cv.TM_CCOEFF', 'cv.TM_CCOEFF_NORMED', 'cv.TM_CCORR',
|
||||
'cv.TM_CCORR_NORMED', 'cv.TM_SQDIFF', 'cv.TM_SQDIFF_NORMED']
|
||||
i=0
|
||||
for meth in methods:
|
||||
img = img2.copy()
|
||||
method = eval(meth)
|
||||
# 应用模板匹配
|
||||
res = cv.matchTemplate(img,template,method)
|
||||
min_val, max_val, min_loc, max_loc = cv.minMaxLoc(res)
|
||||
# 如果方法是TM_SQDIFF或TM_SQDIFF_NORMED,则取最小值
|
||||
if method in [cv.TM_SQDIFF, cv.TM_SQDIFF_NORMED]:
|
||||
top_left = min_loc
|
||||
else:
|
||||
top_left = max_loc
|
||||
bottom_right = (top_left[0] + w, top_left[1] + h)
|
||||
cv.rectangle(img,top_left, bottom_right, 255, 2)
|
||||
cv.imwrite(os.path.join(work_sapce_path,"matchresult_{}.jpg".format(i)),img)
|
||||
i=i+1
|
|
@ -0,0 +1,8 @@
|
|||
import geo_rpc
|
||||
import os
|
||||
import numpy as np
|
||||
|
||||
from osgeo import gdal
|
||||
rpc = geo_rpc.read_rpc_file(r'D:\MicroSAR\C-SAR\Ortho\Temporary\unpack\GF3B_MYC_FSI_002233_E120.8_N30.2_20220426_L1A_VHVV_L10000030053\GF3B_MYC_FSI_002233_E120.8_N30.2_20220426_L1A_VV_L10000030053.rpb')
|
||||
lon, lat = rpc.localization(19820, 14284, 0)
print(lon, lat)
|
|
@ -0,0 +1,4 @@
|
|||
cd E:\0test\oneOrtho
|
||||
Build from the spec (one-folder mode): pyinstaller -D packing.spec
|
||||
|
||||
pyinstaller -F --add-data "D:/Anaconda/envs/micro/Lib/site-packages/dask/dask.yaml;./dask" --add-data "D:/Anaconda/envs/micro/Lib/site-packages/distributed/distributed.yaml;./distributed" --hidden-import pyproj._compat
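Note: the -F command above names no entry script; judging from the spec files in this repository the intended target is presumably OrthoMain.py, e.g.:
pyinstaller -F OrthoMain.py --add-data "D:/Anaconda/envs/micro/Lib/site-packages/dask/dask.yaml;./dask" --add-data "D:/Anaconda/envs/micro/Lib/site-packages/distributed/distributed.yaml;./distributed" --hidden-import pyproj._compat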