LT1AB重复轨道适配

LT1AB
剑古敛锋 2023-12-06 17:44:22 +08:00
parent ba42e3ebb0
commit a8137a23f6
30 changed files with 5787 additions and 203 deletions

144
.vscode/launch.json vendored Normal file
View File

@ -0,0 +1,144 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"name": "Python: prepSlcLT1AB File",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/prepSlcLT1AB.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-i",
"/mnt/d/LTInsar/download/",
"-o",
"/mnt/d/LTInsar/slc/",
"--linux"
]
},
{
"name": "Python: DEM2ISCE File",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/DEM2ISCE.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-s",
"/mnt/d/LTInsar/dem/",
"-o",
"/mnt/d/LTInsar/demISCE/"
]
},
{
"name": "Python: stackStripMap File",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/stackStripMap.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-s",
"/mnt/d/LTInsar/slc/",
"-d",
"/mnt/d/LTInsar/demISCE/demLat_N47_N49_Lon_E130_E132.dem.wgs84",
"-m",
"20230327",
"-x",
"47.65 47.73 130.83 130.91",
"-w",
"/mnt/d/LTInsar/work/",
"-u",
"snaphu",
"--nofocus"
]
},
{
"name": "Python: run 01 20230327",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/stripmapWrapper.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-c",
"/mnt/d/LTInsar/work/configs/config_crop_20230327"
]
},
{
"name": "Python: run 01 20230404",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/stripmapWrapper.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-c",
"/mnt/d/LTInsar/work/configs/config_crop_20230404"
]
},
{
"name": "Python: run 02",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/stripmapWrapper.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-c",
"/mnt/d/LTInsar/work/configs/config_reference_20230523"
]
},
{
"name": "Python: run 08 20230404",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/stripmapWrapper.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-c",
"/mnt/d/LTInsar/work/configs/config_baselinegrid_20230404"
]
},
{
"name": "Python: run 08 20230327",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/stripmapWrapper.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-c",
"/mnt/d/LTInsar/work/configs/config_baselinegrid_20230327"
]
},
{
"name": "Python: run 07 20230404",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/stripmapWrapper.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-c",
"/mnt/d/LTInsar/work/configs/config_fineResamp_20230404"
]
},
{
"name": "Python: run 09",
"type": "python",
"request": "launch",
"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/stripmapWrapper.py",
"console": "integratedTerminal",
"justMyCode": false,
"args": [
"-c",
"/mnt/d/LTInsar/work/configs/config_igram_20230327_20230404"
]
}
]
}

View File

@ -87,7 +87,7 @@ class DEM2ISCE(Component):
demImage = createDemImage()
width = self.getDemWidth(outname)
height=self.getDemHeight(outname)
demImage.initImage(outname,'read',width,type="float")
demImage.initImage(outname,'write',width,type="float")
length = demImage.getLength()
# 获取分辨率
geotransform=self.getGeotransform(outname)

View File

@ -234,6 +234,15 @@ class Orbit(Component):
self._orbitSource = source or None
self._referenceFrame = None
self._stateVectors.configure()
""" 自定义 """
self.oribitStartTime=None
self.A_arr=None
self.polynum=None
""" LT1AB 多普勒参数"""
self.refrenceTime=None
self.dopperPoly=[]
""" 卫星名称"""
self.sessionMode=None
#self._stateVectors = stateVectors or []
self._cpStateVectors = []
type(self._stateVectors)
@ -260,6 +269,67 @@ class Orbit(Component):
# #pack the orbit into stateVectors
# self._packOrbit(cpStateVectors[0], cpStateVectors[1], cpStateVectors[2], cpStateVectors[3])
def setsessionMode(self, SessionMode):
    """Record the satellite session/mode identifier (e.g. "LT1A" or "LT1B")."""
    self.sessionMode = SessionMode
def setPolyParams(self, polynum, orbitStartTime, A_arr):
    """Store the orbit polynomial fit.

    polynum: number of polynomial terms (degree + 1).
    orbitStartTime: epoch (POSIX seconds) the polynomial time axis starts from.
    A_arr: coefficient rows, one per degree, each [px, py, pz, vx, vy, vz].
    """
    self.polynum = polynum
    # NOTE: attribute name "oribitStartTime" (sic) kept — other methods read it.
    self.oribitStartTime = orbitStartTime
    self.A_arr = A_arr
def setDoppler(self, refrenceTime, dopperPoly):
    """Store the LT1A/B Doppler reference time and polynomial coefficients.

    NOTE: parameter/attribute spellings ("refrenceTime", "dopperPoly")
    are kept as-is because other code reads these attribute names.
    """
    self.refrenceTime = refrenceTime
    self.dopperPoly = dopperPoly
def getSatelliteSpaceState(self, time_float_datetime):
    '''
    Per-pixel orbit solver: evaluate the fitted orbit polynomial at the
    given datetime (measured from the orbit start epoch) and return the
    satellite state vector.
    args:
        time_float_datetime: datetime of interest
    return:
        StateVector carrying [time, Xp, Yp, Zp, Vx, Vy, Vz]
    '''
    # Seconds elapsed since the polynomial's start epoch.
    elapsed = time_float_datetime.timestamp() - self.oribitStartTime
    # Accumulate the six state components: px, py, pz, vx, vy, vz.
    state = [0, 0, 0, 0, 0, 0]
    for degree in range(self.polynum):
        weight = elapsed ** degree
        coeffs = self.A_arr[degree]
        for comp in range(6):
            state[comp] += coeffs[comp] * weight
    sv = StateVector()
    sv.setTime(time_float_datetime)
    sv.setPosition(state[0:3])
    sv.setVelocity(state[3:6])
    return sv
def getTimeOrbits(self, UTCStartTime, UTCEndTime, orbitnum=1000):
    """Densify the polynomial orbit between two UTC times.

    The [start, end] window is padded by 30 s on each side and split into
    ``orbitnum`` equal steps; one extra node is appended exactly at the
    padded end time, so ``orbitnum + 1`` state vectors are returned
    (the final node is duplicated when the steps land exactly on the end).

    Args:
        UTCStartTime: start time string, "%Y-%m-%dT%H:%M:%S.%f".
        UTCEndTime: end time string, same format.
        orbitnum: number of sampling intervals.

    Returns:
        list of state vectors (the extended orbit nodes).

    Raises:
        ValueError: if the start time is later than the end time.
    """
    startTime_stamp = datetime.datetime.strptime(UTCStartTime, "%Y-%m-%dT%H:%M:%S.%f") - datetime.timedelta(seconds=30)
    endTime_stamp = datetime.datetime.strptime(UTCEndTime, "%Y-%m-%dT%H:%M:%S.%f") + datetime.timedelta(seconds=30)
    if startTime_stamp > endTime_stamp:
        # Fix: the original used a bare `raise` with no active exception,
        # which itself fails with "No active exception to re-raise".
        raise ValueError("start time %s is later than end time %s" % (UTCStartTime, UTCEndTime))
    delta_t = (endTime_stamp.timestamp() - startTime_stamp.timestamp()) / orbitnum
    extractOrbits = []
    temptime = startTime_stamp
    while temptime < endTime_stamp:
        temptime = temptime + datetime.timedelta(seconds=delta_t)
        extractOrbits.append(self.getSatelliteSpaceState(temptime))
    # One final node exactly at the (padded) end time.
    extractOrbits.append(self.getSatelliteSpaceState(endTime_stamp))
    return extractOrbits  # extended orbit nodes
def adaptToRender(self):
import copy
# make a copy of the stateVectors to restore it after dumping
@ -376,38 +446,45 @@ class Orbit(Component):
@raises NotImplementedError: if the desired interpolation method
cannot be decoded
"""
if time not in self:
raise ValueError(
"Time stamp (%s) falls outside of the interpolation interval [%s:%s]" %
(time,self.minTime,self.maxTime)
)
if self.sessionMode is None:
if time not in self:
raise ValueError(
"Time stamp (%s) falls outside of the interpolation interval [%s:%s]" %(time,self.minTime,self.maxTime)
)
if method == 'linear':
newSV = self._linearOrbitInterpolation(time)
elif method == 'legendre':
newSV = self._legendreOrbitInterpolation(time)
elif method == 'hermite':
newSV = self._hermiteOrbitInterpolation(time)
else:
if method == 'linear':
newSV = self._linearOrbitInterpolation(time)
elif method == 'legendre':
newSV = self._legendreOrbitInterpolation(time)
elif method == 'hermite':
newSV = self._hermiteOrbitInterpolation(time)
else:
raise NotImplementedError(
"Orbit interpolation type %s, is not implemented" % method
)
return newSV
elif self.sessionMode=="LT1B" or self.sessionMode=="LT1A":
return self.getSatelliteSpaceState(time)
## Isn't orbit redundant? -compute the method based on name
def interpolate(self, time, method='linear'):
if self.sessionMode is None:
if time not in self:
raise ValueError("Time stamp (%s) falls outside of the interpolation interval [%s:%s]"
% (time,self.minTime,self.maxTime))
try:
return getattr(self, '_'+method+'OrbitInterpolation')(time)
except AttributeError:
pass
raise NotImplementedError(
"Orbit interpolation type %s, is not implemented" % method
)
return newSV
elif self.sessionMode=="LT1B" or self.sessionMode=="LT1A":
return self.getSatelliteSpaceState(time)
## Isn't orbit redundant? -compute the method based on name
def interpolate(self, time, method='linear'):
if time not in self:
raise ValueError("Time stamp (%s) falls outside of the interpolation interval [%s:%s]"
% (time,self.minTime,self.maxTime))
try:
return getattr(self, '_'+method+'OrbitInterpolation')(time)
except AttributeError:
pass
raise NotImplementedError(
"Orbit interpolation type %s, is not implemented" % method
)
interpolateOrbit = interpolate
# interpolateOrbit = interpolate #暂时注释----------------------------------------------------------------------------
def _linearOrbitInterpolation(self,time):
"""
@ -558,7 +635,6 @@ class Orbit(Component):
"Fewer than 4 state vectors present in orbit, cannot interpolate"
)
return None
# The Fortran routine assumes that it is getting an array of four
# state vectors
try:
@ -579,7 +655,6 @@ class Orbit(Component):
obsTime, obsPos, obsVel,offset = newOrbit.to_tuple(
relativeTo=self.minTime
)
td = time - self.minTime
ansTime = DTU.timeDeltaToSeconds(td)
flatObsPos = [item for sublist in obsPos for item in sublist]
@ -601,6 +676,8 @@ class Orbit(Component):
ansPos_C,
ansVel_C)
# print('插值成功----------------------------')
# print(StateVector(time=time, position=ansPos_C[:], velocity=ansVel_C[:]))
return StateVector(time=time, position=ansPos_C[:], velocity=ansVel_C[:])
## This need to be public -very confusing since there is an __iter__
@ -998,7 +1075,7 @@ class Orbit(Component):
pointOnGround = rdr2geo
def geo2rdr(self, llh, side=-1, planet=None,
doppler=None, wvl=None):
doppler=None, wvl=None,isLT1AB=True):
'''
Takes a lat, lon, height triplet and returns azimuth time and range.
Assumes zero doppler for now.
@ -1014,45 +1091,119 @@ class Orbit(Component):
refElp = Planet(pname='Earth'). ellipsoid
else:
refElp = planet.ellipsoid
# print('llh', llh)
xyz = refElp.llh_to_xyz(llh)
delta = (self.maxTime - self.minTime).total_seconds() * 0.5
tguess = self.minTime + datetime.timedelta(seconds = delta)
outOfBounds = False
# Start the previous guess tracking with dummy value
t_prev_guess = tguess + datetime.timedelta(seconds=10)
for ii in range(51):
try:
sv = self.interpolateOrbit(tguess, method='hermite')
except:
outOfBounds = True
break
tguess = self.minTime #+ datetime.timedelta(seconds = delta)
# print("Orbits.py 1024-----------------------------------------------")
# print("self.maxTime", self.maxTime)
# print("self.minTime", self.minTime)
# print(delta)
# print(tguess)
LIGHTSPEED=299792458
if wvl==0:
isLT1AB=False
if isLT1AB and (self.sessionMode=="LT1A" or self.sessionMode=="LT1B") : # 专门针对 LT1AB
print("LT1AB orbit.....")
dt=0.0001
outOfBounds = False
for ii in range(51):
try:
sv= self.getSatelliteSpaceState(tguess+datetime.timedelta(seconds= dt)) # 获取卫星的 位置、速度
except Exception as e:
print(e)
outOfBounds = True
break
pos1 = np.array(sv.getPosition()) # 卫星坐标
vel1 = np.array(sv.getVelocity()) # 卫星速度
dr1 = pos1-xyz
rng1 = np.linalg.norm(dr1) # 斜距
# ((R_s1.array() * V_s1.array()).rowwise().sum().array() * (-2) / (R * this->lamda))[0];
FdTheory1 = -2/(rng1*wvl)*np.dot(dr1,vel1)
try:
sv= self.getSatelliteSpaceState(tguess)
except Exception as e:
print(e)
outOfBounds = True
break
pos2 = np.array(sv.getPosition()) # 卫星坐标
vel2 = np.array(sv.getVelocity()) # 卫星速度
dr2 = pos2-xyz
rng = np.linalg.norm(dr2) # 斜距
FdTheory2= -2/(rng*wvl)*np.dot(dr2,vel2)
TSR= rng * 2 / LIGHTSPEED - self.refrenceTime # nx1
FdNumerical=0
# FdNumerical=FdNumerical+self.dopperPoly[0]*TSR**0
# FdNumerical=FdNumerical+self.dopperPoly[1]*TSR**1
# FdNumerical=FdNumerical+self.dopperPoly[2]*TSR**2
# FdNumerical=FdNumerical+self.dopperPoly[3]*TSR**3
fdopper_grad=(FdTheory1 - FdTheory2)/dt
inc_t = (FdTheory2-FdNumerical) /fdopper_grad
# print(inc_t,rng,FdNumerical,FdTheory2,tguess,pos2)
tguess = tguess - datetime.timedelta(seconds = inc_t)
if abs(inc_t) < 1e-6:
break
else:
t_prev_guess = tguess
# print(outOfBounds)
# print("end ------------------------------------------------------------\n")
if outOfBounds:
raise Exception('Interpolation time out of bounds')
else:
outOfBounds = False
for ii in range(51):
try:
sv = self.interpolateOrbit(tguess, method='hermite')
except Exception as e:
if self.sessionMode=="LT1A" or self.sessionMode=="LT1B":
sv = self.getSatelliteSpaceState(tguess) # 获取卫星的 位置、速度
print(e)
outOfBounds = True
break
pos = np.array(sv.getPosition())
vel = np.array(sv.getVelocity())
pos = np.array(sv.getPosition())
vel = np.array(sv.getVelocity())
# print("xyz", xyz)
# print("pos", pos)
dr = xyz-pos
rng = np.linalg.norm(dr) # 求斜距
# print("rng", rng)
dr = xyz-pos
rng = np.linalg.norm(dr)
dopfact = np.dot(dr,vel) # fd 公式
# print("dopfact", dopfact)
dopfact = np.dot(dr,vel)
fdop = doppler(DTU.seconds_since_midnight(tguess),rng) * wvl * 0.5
fdopder = (0.5*wvl*doppler(DTU.seconds_since_midnight(tguess),rng+10.0) - fdop) / 10.0
fdop = doppler(DTU.seconds_since_midnight(tguess),rng)* wvl * 0.5
# print("doppler", doppler(DTU.seconds_since_midnight(tguess),rng))
# print("wvl", wvl)
# print("fdop", fdop)
fdopder = (0.5*wvl*doppler(DTU.seconds_since_midnight(tguess),rng+10.0) - fdop) / 10.0
# print("doppler2", doppler(DTU.seconds_since_midnight(tguess),rng+10.0))
# print("fdopder", fdopder)
fn = dopfact - fdop * rng
c1 = -np.dot(vel, vel)
c2 = (fdop/rng + fdopder)
# print("c1", c1)
# print("c2", c2)
fnprime = c1 + c2 * dopfact
# print("时间为", fn/fnprime)
# if abs(fn/fnprime) > 1:
# break
tguess = tguess - datetime.timedelta(seconds = fn/fnprime)
# print("输出的tguess", tguess)
# print(outOfBounds)
# print("end ------------------------------------------------------------\n")
if outOfBounds:
raise Exception('Interpolation time out of bounds')
fn = dopfact - fdop * rng
c1 = -np.dot(vel, vel)
c2 = (fdop/rng + fdopder)
fnprime = c1 + c2 * dopfact
tguess = tguess - datetime.timedelta(seconds = fn/fnprime)
if abs(tguess - t_prev_guess).total_seconds() < 5e-9:
break
else:
t_prev_guess = tguess
if outOfBounds:
raise Exception('Interpolation time out of bounds')
return tguess, rng

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@ -123,7 +123,7 @@ class Radarsat2(Sensor):
frequency = self.product.sourceAttributes.radarParameters.radarCenterFrequency
orig_prf = self.product.sourceAttributes.radarParameters.prf # original PRF not necessarily effective PRF
rangePixelSize = self.product.imageAttributes.rasterAttributes.sampledPixelSpacing
rangeSamplingRate = Const.c/(2*rangePixelSize)
rangeSamplingRate = Const.c/(2*rangePixelSize) # eqvFs
pulseLength = self.product.sourceAttributes.radarParameters.pulseLengths[0]
pulseBandwidth = self.product.sourceAttributes.radarParameters.pulseBandwidths[0]
polarization = self.product.sourceAttributes.radarParameters.polarizations
@ -587,7 +587,7 @@ class _RadarParameters(Radarsat2Namespace):
retstr = "RadarParameters:"+sep+tab
retlst = ()
retstr += "acquisitionType=%s"+sep+tab
retlst += (self.acquisitionType,)
retlst += (self.acquisitionType,)
retstr += "beams=%s"+sep+tab
retlst += (self.beams,)
retstr += "polarizations=%s"+sep+tab

View File

@ -99,6 +99,8 @@ createSICD_RGZERO = partial(factory_template, 'SICD_RGZERO')
createICEYE_SLC = partial(factory_template, 'ICEYE_SLC')
createUAVSAR_Hdf5_SLC = partial(factory_template, 'UAVSAR_HDF5_SLC')
createSAOCOM_SLC = partial(factory_template, 'SAOCOM_SLC')
createGF3_SLC = partial(factory_template, 'GF3_SLC')
createLT1ABRepeat = partial(factory_template, 'LT1ABLT1ABREPEAT')
SENSORS = {'ALOS' : createALOS,
'ALOS_SLC' : createALOS_SLC,
@ -125,7 +127,9 @@ SENSORS = {'ALOS' : createALOS,
'SICD_RGZERO' : createSICD_RGZERO,
'ICEYE_SLC' : createICEYE_SLC,
'UAVSAR_HDF5_SLC' : createUAVSAR_Hdf5_SLC,
'SAOCOM_SLC': createSAOCOM_SLC}
'SAOCOM_SLC': createSAOCOM_SLC,
'GF3_SLC' : createGF3_SLC,
'LT1ABLT1ABREPEAT' : createLT1ABRepeat}
#These are experimental and can be added in as they become ready
# 'JERS': createJERS,

View File

@ -305,4 +305,5 @@
DEALLOCATE( r_bamp, r_xofr, b_patchTrees, b_all_unwrap )
write(*,'(/a/)') '*** Normal Completion ***'
write(*,'(/1x,a/)') '<< PS filtering end >>'
end

View File

@ -79,9 +79,11 @@ class Looks(Component):
inPtr = inImage.getImagePointer()
outPtr = outImage.getImagePointer()
looks.looks_Py(inPtr, outPtr, self.downLooks, self.acrossLooks, inImage.dataType.upper())
inImage.finalizeImage()
# 创建ENVI ,格式修改为 BIL
outImage.finalizeImage()
outImage.renderHdr()

View File

@ -0,0 +1,216 @@
#!/usr/bin/env python3
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# LAMP License
#
# Author: chenzenghui
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# 自定义dem管理
# 1. 创建二进制文件 ENVI hdr
# 2. 读取文件构建.vrt .xml
import argparse
import isce
from ctypes import cdll, c_char_p, c_int, byref
from array import array
import struct
import zipfile
import os
import sys
import math
import urllib.request, urllib.parse, urllib.error
import numpy as np
from isce import logging
from iscesys.Component.Component import Component
from isceobj.Image import createDemImage
from osgeo import gdal,osr,ogr
import xml.etree.ElementTree as ET
from html.parser import HTMLParser
import time
env_str = os.path.dirname(os.path.abspath(sys.argv[0]))
os.environ['PROJ_LIB'] = env_str
class DEM2ISCE(Component):
    """Convert GeoTIFF DEM tiles into a single ENVI-format DEM (with .vrt/.xml
    metadata) that ISCE can consume.  Typical entry point is stitchDems()."""

    def dem_merged(self,in_dem_path, out_dem_path):
        '''
        Merge/resample DEM tiles; the output CRS is assumed to be WGS84.
        args:
            in_dem_path: input folder containing the DEM tiles
            out_dem_path: output folder for the merged DEM
        '''
        # Collect every .tif/.tiff DEM in the input folder
        # dem_file_paths = in_dem_path
        lons_min = []
        lons_max = []
        lats_min = []
        lats_max = []
        dem_file_paths=[os.path.join(in_dem_path,dem_name) for dem_name in os.listdir(in_dem_path) if (dem_name.find(".tif")>=0 or dem_name.find(".tiff")>=0) and dem_name.find(".tif.")==-1]
        spatialreference=osr.SpatialReference()
        spatialreference.SetWellKnownGeogCS("WGS84")  # geographic coordinates in degrees
        spatialproj=spatialreference.ExportToWkt()  # export the projection as WKT
        # Mosaic all DEM tiles into one image; first gather the overall extent
        for file in dem_file_paths:
            dataset = gdal.Open(file)
            transform = dataset.GetGeoTransform()
            im_width = dataset.RasterXSize
            im_height = dataset.RasterYSize
            lon_right = transform[0] + transform[1] * im_width
            lat_down = transform[3] + transform[5] * im_height
            lons_min.append(float(transform[0]))
            lons_max.append(float(lon_right))
            lats_min.append(float(lat_down))
            lats_max.append(float(transform[3]))
        lon_min = round(np.min(lons_min))
        lon_max = round(np.max(lons_max))
        lat_min = round(np.min(lats_min))
        lat_max = round(np.max(lats_max))
        all_file_name = 'demLat_N' + str(lat_min) + '_N' + str(lat_max) + '_Lon_E' + str(lon_min) + '_E' + str(lon_max) + '.tiff'
        # print("lon_min :{} lon_max :{}, lat_min :{}, lat_max: {}".format(lon_min, lon_max, lat_min, lat_max))
        # print(file_name)
        out_DEM_path=os.path.join(out_dem_path, all_file_name)
        # out_DEM=out_dem_path
        # First pass: merge all tiles into one GeoTIFF
        gdal.Warp(out_DEM_path,
                  dem_file_paths,
                  format="GTiff",
                  dstSRS=spatialproj,
                  dstNodata=self._NoDataValue,
                  outputType=gdal.GDT_Float32)
        file_name_vrt = 'demLat_N' + str(lat_min) + '_N' + str(lat_max) + '_Lon_E' + str(lon_min) + '_E' + str(lon_max) + '.dem.wgs84.vrt'
        file_name = 'demLat_N' + str(lat_min) + '_N' + str(lat_max) + '_Lon_E' + str(lon_min) + '_E' + str(lon_max) + '.dem.wgs84'
        # print("lon_min :{} lon_max :{}, lat_min :{}, lat_max: {}".format(lon_min, lon_max, lat_min, lat_max))
        # print(file_name)
        mergeFile =gdal.BuildVRT(os.path.join(out_dem_path,file_name_vrt),out_DEM_path)
        out_DEM=os.path.join(out_dem_path, file_name)
        # out_DEM=out_dem_path
        # Second pass: convert the VRT into the flat ENVI file ISCE expects
        gdal.Warp(out_DEM,
                  mergeFile,
                  format="ENVI",
                  dstSRS=spatialproj,
                  dstNodata=self._NoDataValue,
                  outputType=gdal.GDT_Float32)
        time.sleep(3)  # give GDAL time to flush before metadata generation — TODO confirm necessity
        return out_DEM

    #this method also create an actual DemImage object that is returned by the getImage() method
    def createXmlMetadata(self,outname):
        # Build the DemImage and write its .xml/.vrt sidecar files.
        demImage = self.createImage(outname)
        demImage.renderHdr()

    def getDemWidth(self,outname):
        # Raster width (columns) of the produced DEM, via GDAL.
        gdal.AllRegister()
        dataset=gdal.Open(outname)
        width=dataset.RasterXSize
        del dataset
        return width

    def getDemHeight(self,outname):
        # Raster height (rows) of the produced DEM, via GDAL.
        gdal.AllRegister()
        dataset=gdal.Open(outname)
        height=dataset.RasterYSize
        del dataset
        return height

    def getGeotransform(self,outname):
        # Affine geotransform (origin + pixel size) of the produced DEM.
        gdal.AllRegister()
        dataset=gdal.Open(outname)
        geotransform = dataset.GetGeoTransform()
        del dataset
        return geotransform

    def createImage(self,outname):
        # Wrap the ENVI DEM file in an ISCE DemImage with geolocation metadata.
        demImage = createDemImage()
        width = self.getDemWidth(outname)
        height=self.getDemHeight(outname)
        demImage.initImage(outname,'read',width,type="float")
        length = demImage.getLength()
        # fetch the resolution / origin from the file itself
        geotransform=self.getGeotransform(outname)
        dictProp = {'METADATA_LOCATION':outname+'.xml','REFERENCE':self._reference,'Coordinate1':{'size':width,'startingValue':geotransform[0],'delta':geotransform[1]},'Coordinate2':{'size':length,'startingValue':geotransform[3],'delta':geotransform[5]},'FILE_NAME':outname}
        #no need to pass the dictionaryOfFacilities since init will use the default one
        demImage.init(dictProp)
        self._image = demImage
        return demImage

    def setFillingValue(self,val):
        # Value used to fill holes in the DEM.
        self._fillingValue = val

    def setNoDataValue(self,val):
        # Nodata value written into the merged output.
        self._NoDataValue = val

    def stitchDems(self,source, outname):
        import glob
        # Merge the tiles, then emit the ISCE metadata for the result.
        out_dem = self.dem_merged(source, outname)
        self.createXmlMetadata(out_dem)

    family = 'DEM2ISCE'  # Component family name

    def __init__(self,family = '', name = ''):
        self._extension = '.tif'
        self._zip = '.zip'
        #to make it working with other urls, make sure that the second part of the url
        #it's /srtm/version2_1/SRTM(1,3)
        self._filters = {'region1':['Region'],'region3':['Africa','Australia','Eurasia','Islands','America'],'fileExtension':['.hgt.zip']}
        self._remove = ['.jpg']
        self._metadataFilename = 'fileDem.dem'
        self._createXmlMetadata = None
        self._createRscMetadata = None
        self._regionList = {'1':[],'3':[]}
        ##self._keepDems = False
        self._fillingFilename = 'filling.hgt' # synthetic tile to cover holes
        ##self._fillingValue = -32768 # fill the synthetic tile with this value
        ##self._noFilling = False
        self._failed = 'failed'
        self._succeded = 'succeded'
        self._image = None
        self._reference = 'EGM96'
        super(DEM2ISCE, self).__init__(family if family else  self.__class__.family, name=name)
        # logger not defined until baseclass is called
        if not self.logger:
            self.logger = logging.getLogger('isce.contrib.demUtils.DEM2ISCE')

    def getImage(self):
        # The DemImage built by createImage(), or None before conversion.
        return self._image
# Main DEM -> ISCE conversion workflow.
def processDEM2ISCE(name, source_path, target_path, fillvalue, noDataValue):
    """Build a DEM2ISCE component and run the merge/convert pipeline."""
    converter = DEM2ISCE(name=name)
    converter.setFillingValue(fillvalue)
    converter.setNoDataValue(noDataValue)
    converter.stitchDems(source_path, target_path)
def main():
    """CLI entry point: convert GeoTIFF DEM(s) into the ISCE DEM format."""
    # Force the help text when invoked without arguments.
    if len(sys.argv) == 1:
        sys.argv.append('-h')
    # Usage examples go in the epilog; RawDescriptionHelpFormatter keeps
    # the epilog's own line breaks intact.
    epilog = ''.join([
        '将格式为tif 的DEM 转换为ISCE 支持的DEM格式:\n\n',
        'Usage examples:\n\n',
        'DEM2ISCE.py -s /mnt/d/codestorage/isce2/青海省.tif -o /mnt/d/codestorage/isce2/青海省_wgs84 -fillvalue -9999 -Nodata -9999\n\n',
    ])
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, epilog=epilog)
    parser.add_argument('-s', '--source', type=str, default="/mnt/d/codestorage/isce2/青海省.tif", dest='source_path', help='输入dem格式为tif')
    parser.add_argument('-o', '--outpath', type=str, default='/mnt/d/codestorage/isce2/青海省_wgs84', dest='outpath', help='输出isce 支持的DEM ')
    parser.add_argument('-fillvalue', '--fillvalue', type=float, default=-9999, dest='fillvalue', help='空值填充')
    parser.add_argument('-Nodata', '--Nodata', type=float, default=-9999, dest='Nodatavalue', help='无效值填充')
    args = parser.parse_args()
    processDEM2ISCE("DEM2ISCE", args.source_path, args.outpath, args.fillvalue, args.Nodatavalue)
    print("DEM==>ISCE ok")
    return -1  # kept as-is: the __main__ guard ignores this value
if __name__ == '__main__':
main()

View File

@ -0,0 +1,691 @@
"""
@Project microproduct
@File ImageHandle.py
@Function 实现对待处理SAR数据的读取格式标准化和处理完后保存文件功能
@Author LMM
@Date 2021/10/19 14:39
@Version 1.0.0
"""
import os
from PIL import Image
from osgeo import gdal
from osgeo import osr
import numpy as np
from PIL import Image
import cv2
import logging
import math
logger = logging.getLogger("mylog")
class ImageHandler:
"""
影像读取编辑保存
"""
def __init__(self):
    # Stateless utility class: every operation opens its own dataset,
    # so there is nothing to initialize.
    pass
@staticmethod
def get_dataset(filename):
    """Open *filename* with GDAL; return the dataset handle, or None on failure."""
    gdal.AllRegister()
    # gdal.Open already yields None when the file cannot be opened.
    return gdal.Open(filename)
def get_scope(self, filename):
    """Return the four corner coordinates of *filename* (see cal_img_scope), or None."""
    gdal.AllRegister()
    handle = gdal.Open(filename)
    if handle is None:
        return None
    corners = self.cal_img_scope(handle)
    del handle
    return corners
@staticmethod
def get_projection(filename):
    """Return the map projection (WKT) of *filename*, or None if it cannot be opened."""
    gdal.AllRegister()
    handle = gdal.Open(filename)
    if handle is None:
        return None
    wkt = handle.GetProjection()
    del handle
    return wkt
@staticmethod
def get_geotransform(filename):
    """Return the affine geotransform (pixel/line -> projected/geographic coords) of *filename*, or None."""
    gdal.AllRegister()
    handle = gdal.Open(filename)
    if handle is None:
        return None
    transform = handle.GetGeoTransform()
    del handle
    return transform
@staticmethod
def get_invgeotransform(filename):
    """Return the inverse geotransform (projected/geographic coords -> pixel/line) of *filename*, or None.

    Fix: this method was declared without ``self`` but also without
    ``@staticmethod``, so calling it on an instance passed the instance as
    *filename* and always failed; the decorator restores both call styles
    (class-level and instance-level) without changing the signature.
    """
    gdal.AllRegister()
    dataset = gdal.Open(filename)
    if dataset is None:
        return None
    geotransform = dataset.GetGeoTransform()
    geotransform = gdal.InvGeoTransform(geotransform)
    del dataset
    return geotransform
@staticmethod
def get_bands(filename):
    """Return the number of raster bands in *filename*, or None if it cannot be opened."""
    gdal.AllRegister()
    handle = gdal.Open(filename)
    if handle is None:
        return None
    count = handle.RasterCount
    del handle
    return count
@staticmethod
def geo2lonlat(dataset, x, y):
    """Convert projected coordinates to geographic coordinates.

    The projected CRS is taken from *dataset*; returns the (lon, lat)
    corresponding to projected point (x, y).
    """
    projected = osr.SpatialReference()
    projected.ImportFromWkt(dataset.GetProjection())
    geographic = projected.CloneGeogCS()
    transform = osr.CoordinateTransformation(projected, geographic)
    return transform.TransformPoint(x, y)[:2]
@staticmethod
def get_band_array(filename, num=1):
    """Return band *num* (1-based) of *filename* as an array, or None on open failure."""
    gdal.AllRegister()
    handle = gdal.Open(filename)
    if handle is None:
        return None
    band = handle.GetRasterBand(num)
    values = band.ReadAsArray(0, 0, band.XSize, band.YSize)
    # (nodata masking deliberately not applied — raw values are returned)
    del handle
    return values
@staticmethod
def get_data(filename):
    """Read all bands of *filename* into one array (GDAL puts bands first for multiband), or None."""
    gdal.AllRegister()
    handle = gdal.Open(filename)
    if handle is None:
        return None
    cols = handle.RasterXSize
    rows = handle.RasterYSize
    pixels = handle.ReadAsArray(0, 0, cols, rows)
    del handle
    return pixels
@staticmethod
def get_all_band_array(filename):
    """Read every band of *filename* into one array, bands stacked on the 3rd axis.

    For a single-band file the plain 2-D array is returned instead.
    (Used by the atmospheric-delay workflow for ERA-5 imagery.)
    """
    ds = gdal.Open(filename)
    x_size = ds.RasterXSize
    y_size = ds.RasterYSize
    count = ds.RasterCount
    if count == 1:
        return ds.GetRasterBand(1).ReadAsArray(0, 0, x_size, y_size)
    stack = np.zeros((y_size, x_size, count), dtype=float)
    for idx in range(count):
        stack[:, :, idx] = ds.GetRasterBand(idx + 1).ReadAsArray(0, 0, x_size, y_size)
    return stack
@staticmethod
def get_img_width(filename):
    """Return the raster width (columns) of *filename*, or None if it cannot be opened."""
    gdal.AllRegister()
    handle = gdal.Open(filename)
    if handle is None:
        return None
    cols = handle.RasterXSize
    del handle
    return cols
@staticmethod
def get_img_height(filename):
    """Return the raster height (rows) of *filename*, or None if it cannot be opened."""
    gdal.AllRegister()
    handle = gdal.Open(filename)
    if handle is None:
        return None
    rows = handle.RasterYSize
    del handle
    return rows
@staticmethod
def read_img(filename):
    """Read (projection, geotransform, pixel array) from *filename*.

    Returns (None, None, None) when the file cannot be opened or has
    no projection; failures to open are logged.
    """
    gdal.AllRegister()
    handle = gdal.Open(filename)
    if handle is None:
        logger.error('Could not open ' + filename)
        return None, None, None
    proj = handle.GetProjection()
    if proj is None:
        return None, None, None
    geotrans = handle.GetGeoTransform()
    cols = handle.RasterXSize
    rows = handle.RasterYSize
    pixels = handle.ReadAsArray(0, 0, cols, rows)
    del handle
    return proj, geotrans, pixels
def cal_img_scope(self, dataset):
    """Compute the geographic extent of *dataset*.

    Applies the GDAL six-parameter model to the four image corners.
    :return: [point_upleft, point_upright, point_downleft, point_downright]
             or None when the dataset/geotransform is missing.
    """
    if dataset is None:
        return None
    transform = dataset.GetGeoTransform()
    if transform is None:
        return None
    cols = dataset.RasterXSize
    rows = dataset.RasterYSize
    # Corners in (col, row) order: up-left, up-right, down-left, down-right.
    corners = [(0, 0), (cols, 0), (0, rows), (cols, rows)]
    return [self.trans_rowcol2geo(transform, c, r) for c, r in corners]
@staticmethod
def get_scope_ori_sim(filename):
    """
    Compute the geographic extent of a lon/lat lookup image.

    Band 1 is read as per-pixel longitudes and band 2 as per-pixel
    latitudes; the four corner coordinates are taken directly from those
    arrays and reordered so the result is always
    [point_upleft, point_upright, point_downleft, point_downright].
    Returns None when the file cannot be opened.
    """
    gdal.AllRegister()
    dataset = gdal.Open(filename)
    if dataset is None:
        return None
    width = dataset.RasterXSize  # number of columns
    height = dataset.RasterYSize  # number of rows
    band1 = dataset.GetRasterBand(1)
    array1 = band1.ReadAsArray(0, 0, band1.XSize, band1.YSize)
    band2 = dataset.GetRasterBand(2)
    array2 = band2.ReadAsArray(0, 0, band2.XSize, band2.YSize)
    # Decide left/right ordering of the top corners from the longitude band
    if array1[0, 0] < array1[0, width-1]:
        point_upleft = [array1[0, 0], array2[0, 0]]
        point_upright = [array1[0, width-1], array2[0, width-1]]
    else:
        point_upright = [array1[0, 0], array2[0, 0]]
        point_upleft = [array1[0, width-1], array2[0, width-1]]
    # Same for the bottom corners
    if array1[height-1, 0] < array1[height-1, width-1]:
        point_downleft = [array1[height - 1, 0], array2[height - 1, 0]]
        point_downright = [array1[height - 1, width - 1], array2[height - 1, width - 1]]
    else:
        point_downright = [array1[height - 1, 0], array2[height - 1, 0]]
        point_downleft = [array1[height - 1, width - 1], array2[height - 1, width - 1]]
    if(array2[0, 0] < array2[height - 1, 0]):
        # Latitude increases downwards, so swap the top and bottom rows
        tmp1 = point_upleft
        point_upleft = point_downleft
        point_downleft = tmp1
        tmp2 = point_upright
        point_upright = point_downright
        point_downright = tmp2
    return [point_upleft, point_upright, point_downleft, point_downright]
@staticmethod
def trans_rowcol2geo(img_geotrans,img_col, img_row):
"""
据GDAL的六参数模型仿射矩阵将影像图上坐标行列号转为投影坐标或地理坐标根据具体数据的坐标系统转换
:param img_geotrans: 仿射矩阵
:param img_col:图像纵坐标
:param img_row:图像横坐标
:return: [geo_x,geo_y]
"""
geo_x = img_geotrans[0] + img_geotrans[1] * img_col + img_geotrans[2] * img_row
geo_y = img_geotrans[3] + img_geotrans[4] * img_col + img_geotrans[5] * img_row
return [geo_x, geo_y]
@staticmethod
def write_era_into_img(filename, im_proj, im_geotrans, im_data):
    """Save an array as a GeoTIFF (ERA-5 layout: bands on the LAST axis for 3-D input).

    :param filename: output path; missing parent directories are created
    :param im_proj: projection WKT
    :param im_geotrans: affine geotransform (6 values)
    :param im_data: 2-D array, or 3-D array shaped (height, width, bands)
    """
    # numpy dtype name -> GDAL type.  Fix: the original table used the
    # typo keys 'unit16'/'unit32', so uint16/uint32 arrays silently fell
    # back to Float32; the real numpy names are 'uint16'/'uint32'.
    gdal_dtypes = {
        'int8': gdal.GDT_Byte,
        'uint16': gdal.GDT_UInt16,
        'int16': gdal.GDT_Int16,
        'uint32': gdal.GDT_UInt32,
        'int32': gdal.GDT_Int32,
        'float32': gdal.GDT_Float32,
        'float64': gdal.GDT_Float64,
    }
    datatype = gdal_dtypes.get(im_data.dtype.name, gdal.GDT_Float32)
    # Determine band count / raster dimensions from the array shape
    if len(im_data.shape) == 3:
        im_height, im_width, im_bands = im_data.shape
    else:
        im_bands, (im_height, im_width) = 1, im_data.shape
    # Ensure the destination directory exists
    if os.path.exists(os.path.split(filename)[0]) is False:
        os.makedirs(os.path.split(filename)[0])
    driver = gdal.GetDriverByName("GTiff")  # datatype is mandatory: it sizes the allocation
    dataset = driver.Create(filename, im_width, im_height, im_bands, datatype)
    dataset.SetGeoTransform(im_geotrans)  # write the affine transform
    dataset.SetProjection(im_proj)  # write the projection
    if im_bands == 1:
        dataset.GetRasterBand(1).WriteArray(im_data)
    else:
        for i in range(im_bands):
            dataset.GetRasterBand(i + 1).WriteArray(im_data[:, :, i])
    del dataset  # closing the dataset flushes to disk
# Write a GeoTIFF file
@staticmethod
def write_img(filename, im_proj, im_geotrans, im_data, no_data='0'):
    """Save an array as a GeoTIFF.

    :param filename: output path; missing parent directories are created
    :param im_proj: projection WKT
    :param im_geotrans: affine geotransform (6 values)
    :param im_data: 2-D array, or 3-D array shaped (bands, height, width)
    :param no_data: nodata value recorded on every band; 'null' disables it
    """
    # numpy dtype name -> GDAL type.  Fix: the original table used the
    # typo keys 'unit16'/'unit32', so uint16/uint32 arrays silently fell
    # back to Float32; the real numpy names are 'uint16'/'uint32'.
    gdal_dtypes = {
        'int8': gdal.GDT_Byte,
        'uint16': gdal.GDT_UInt16,
        'int16': gdal.GDT_Int16,
        'uint32': gdal.GDT_UInt32,
        'int32': gdal.GDT_Int32,
        'float32': gdal.GDT_Float32,
        'float64': gdal.GDT_Float64,
    }
    datatype = gdal_dtypes.get(im_data.dtype.name, gdal.GDT_Float32)
    # Determine band count / raster dimensions from the array shape;
    # `stacked` remembers whether a 3-D (bands-first) array was supplied.
    stacked = False
    if len(im_data.shape) == 3:
        im_bands, im_height, im_width = im_data.shape
        stacked = True
    else:
        im_bands, (im_height, im_width) = 1, im_data.shape
    # Ensure the destination directory exists
    if os.path.exists(os.path.split(filename)[0]) is False:
        os.makedirs(os.path.split(filename)[0])
    driver = gdal.GetDriverByName("GTiff")  # datatype is mandatory: it sizes the allocation
    dataset = driver.Create(filename, im_width, im_height, im_bands, datatype)
    dataset.SetGeoTransform(im_geotrans)  # write the affine transform
    dataset.SetProjection(im_proj)  # write the projection
    if im_bands == 1:
        # A (1, H, W) stack and a plain (H, W) array are written the same
        # way; the two duplicated branches of the original are merged.
        band_data = im_data[0] if stacked else im_data
        outband = dataset.GetRasterBand(1)
        outband.WriteArray(band_data)
        if no_data != 'null':
            outband.SetNoDataValue(np.double(no_data))
        outband.FlushCache()
    else:
        for i in range(im_bands):
            outband = dataset.GetRasterBand(1 + i)
            outband.WriteArray(im_data[i])
            if no_data != 'null':
                outband.SetNoDataValue(np.double(no_data))
            outband.FlushCache()
    del dataset  # closing the dataset flushes to disk
# 写GeoTiff文件
@staticmethod
def write_img_envi(filename, im_proj, im_geotrans, im_data, no_data='null'):
    """
    Save an array in ENVI format (band-FIRST layout for 3-D input).

    :param filename: output file path; the parent directory is created if missing
    :param im_proj: projection WKT string
    :param im_geotrans: GDAL affine geotransform (6 values)
    :param im_data: numpy array, 2-D (height, width) or 3-D (bands, height, width)
    :param no_data: nodata value for the single-band case; pass 'null' to skip
    :return: None
    """
    # Map numpy dtype names to GDAL data types.
    # FIX: the previous keys 'unit16'/'unit32' were typos that could never
    # match numpy's 'uint16'/'uint32', silently writing those arrays as Float32.
    gdal_dtypes = {
        'int8': gdal.GDT_Byte,
        'uint16': gdal.GDT_UInt16,
        'int16': gdal.GDT_Int16,
        'uint32': gdal.GDT_UInt32,
        'int32': gdal.GDT_Int32,
        'float32': gdal.GDT_Float32,
        'float64': gdal.GDT_Float64,
    }
    # Unknown dtypes fall back to Float32.
    datatype = gdal_dtypes.get(im_data.dtype.name, gdal.GDT_Float32)
    # Determine the band count; 3-D input is (bands, height, width).
    if len(im_data.shape) == 3:
        im_bands, im_height, im_width = im_data.shape
    else:
        im_bands, (im_height, im_width) = 1, im_data.shape
    # Create the output directory if needed (the '' guard avoids makedirs('')
    # raising when filename has no directory component).
    out_dir = os.path.split(filename)[0]
    if out_dir and not os.path.exists(out_dir):
        os.makedirs(out_dir)
    driver = gdal.GetDriverByName("ENVI")  # data type is required so GDAL can size the file
    dataset = driver.Create(filename, im_width, im_height, im_bands, datatype)
    dataset.SetGeoTransform(im_geotrans)  # write the affine geotransform
    dataset.SetProjection(im_proj)  # write the projection
    if im_bands == 1:
        outband = dataset.GetRasterBand(1)
        outband.WriteArray(im_data)
        if no_data != 'null':
            # FIX: convert to float before SetNoDataValue, matching write_img;
            # the raw value previously crashed GDAL for numeric strings.
            outband.SetNoDataValue(np.double(no_data))
        outband.FlushCache()
    else:
        # NOTE(review): the multi-band path has never set a nodata value —
        # presumably intentional, but verify against callers.
        for i in range(im_bands):
            outband = dataset.GetRasterBand(1 + i)
            outband.WriteArray(im_data[i])
            outband.FlushCache()
    del dataset  # flush and close the file
@staticmethod
def write_img_rpc(filename, im_proj, im_geotrans, im_data, rpc_dict):
    """
    Write an array to GeoTIFF and embed RPC metadata in the 'RPC' domain.

    :param filename: output file path
    :param im_proj: projection WKT string
    :param im_geotrans: GDAL affine geotransform (6 values)
    :param im_data: numpy array, 2-D (height, width) or 3-D (bands, height, width)
    :param rpc_dict: mapping of RPC metadata keys to string values
    :return: None
    """
    # Pick the GDAL data type from the numpy dtype name (substring match,
    # so 'uint8' also lands on Byte and 'uint16' on Int16 as before).
    dtype_name = im_data.dtype.name
    if 'int8' in dtype_name:
        datatype = gdal.GDT_Byte
    elif 'int16' in dtype_name:
        datatype = gdal.GDT_Int16
    else:
        datatype = gdal.GDT_Float32
    # Determine array layout: 3-D input is (bands, height, width).
    if im_data.ndim == 3:
        im_bands, im_height, im_width = im_data.shape
    else:
        im_bands = 1
        im_height, im_width = im_data.shape
    dataset = gdal.GetDriverByName("GTiff").Create(
        filename, im_width, im_height, im_bands, datatype)
    dataset.SetGeoTransform(im_geotrans)  # affine geotransform
    dataset.SetProjection(im_proj)  # projection
    # Attach each RPC entry as metadata in the dedicated 'RPC' domain.
    for key, value in rpc_dict.items():
        dataset.SetMetadataItem(key, value, 'RPC')
    if im_bands == 1:
        dataset.GetRasterBand(1).WriteArray(im_data)
    else:
        for band_index in range(im_bands):
            dataset.GetRasterBand(band_index + 1).WriteArray(im_data[band_index])
    del dataset  # flush and close the file
def transtif2mask(self, out_tif_path, in_tif_path, threshold):
    """
    Build a binary mask from a raster: pixels strictly below *threshold*
    become 1, all others 0, and write it with the input's georeferencing.

    :param out_tif_path: output mask path
    :param in_tif_path: input raster path
    :param threshold: threshold value
    """
    im_proj, im_geotrans, im_arr, _ = self.read_img(in_tif_path)
    mask = (im_arr < threshold).astype(int)
    self.write_img(out_tif_path, im_proj, im_geotrans, mask)
def write_quick_view(self, tif_path, color_img=False, quick_view_path=None):
    """
    Generate a quick-view JPEG; by default it is written next to the input
    with the same basename.

    :param tif_path: input image path
    :param color_img: if True, render a random pseudo-color map (one random
                      RGB color per unique non-zero value); otherwise a
                      2%-98% percentile-stretched grayscale image
    :param quick_view_path: output path; defaults to '<tif_path>.jpg'
    """
    if quick_view_path is None:
        quick_view_path = os.path.splitext(tif_path)[0]+'.jpg'
    n = self.get_bands(tif_path)
    if n == 1:  # single band: use the data as-is
        t_data = self.get_data(tif_path)
    else:  # multi-band: convert the first two bands to intensity
        t_data = self.get_data(tif_path)
        t_data = t_data.astype(float)
        t_data = np.sqrt(t_data[0] ** 2 + t_data[1] ** 2)
    t_r = self.get_img_height(tif_path)
    t_c = self.get_img_width(tif_path)
    # Choose the quick-view size: 1/10 scale for very large images,
    # otherwise shrink the longer side to 1024 preserving aspect ratio.
    if t_r > 10000 or t_c > 10000:
        q_r = int(t_r / 10)
        q_c = int(t_c / 10)
    elif 1024 < t_r < 10000 or 1024 < t_c < 10000:
        if t_r > t_c:
            q_r = 1024
            q_c = int(t_c/t_r * 1024)
        else:
            q_c = 1024
            q_r = int(t_r/t_c * 1024)
    else:
        q_r = t_r
        q_c = t_c
    if color_img is True:
        # Random pseudo-color rendering.
        img = np.zeros((t_r, t_c, 3), dtype=np.uint8)  # (height, width, channels)
        u = np.unique(t_data)
        for i in u:
            if i != 0:
                w = np.where(t_data == i)
                img[w[0], w[1], 0] = np.random.randint(0, 255)  # random 0-255; tune these bounds to restrict the color range
                img[w[0], w[1], 1] = np.random.randint(0, 255)
                img[w[0], w[1], 2] = np.random.randint(0, 255)
        img = cv2.resize(img, (q_c, q_r))  # cv2 expects (width, height)
        cv2.imwrite(quick_view_path, img)
        # cv2.imshow("result4", img)
        # cv2.waitKey(0)
    else:
        # Grayscale rendering with a 2%-98% percentile stretch.
        # NOTE(review): 'min'/'max' shadow the Python builtins here.
        min = np.percentile(t_data, 2)  # np.nanmin(t_data)
        max = np.percentile(t_data, 98)  # np.nanmax(t_data)
        t_data[np.isnan(t_data)] = max  # replace NaNs with the upper bound
        # NOTE(review): the stretch is only applied when the value range is
        # BELOW 256, so wide-range data is saved unscaled — confirm this
        # condition is not inverted.
        if (max - min) < 256:
            t_data = (t_data - min) / (max - min) * 255
        out_img = Image.fromarray(t_data)
        out_img = out_img.resize((q_c, q_r))  # resample to quick-view size
        out_img = out_img.convert("L")  # convert to 8-bit grayscale
        out_img.save(quick_view_path)
def limit_field(self, out_path, in_path, min_value, max_value):
    """
    Clamp band 1 of *in_path* to [min_value, max_value] and save the result
    to *out_path* with the input's projection and geotransform.

    :param out_path: output path
    :param in_path: input path; the output reuses its georeferencing
    :param min_value: lower bound
    :param max_value: upper bound
    :return: True on completion
    """
    proj = self.get_projection(in_path)
    geotrans = self.get_geotransform(in_path)
    data = self.get_band_array(in_path, 1)
    # In-place clip replaces the two explicit boolean-mask assignments.
    np.clip(data, min_value, max_value, out=data)
    self.write_img(out_path, proj, geotrans, data)
    return True
def band_merge(self, lon, lat, ori_sim):
    """
    Stack a longitude raster and a latitude raster into one two-band image
    (band 0 = lon, band 1 = lat) written with an identity geotransform.

    :param lon: path of the longitude raster
    :param lat: path of the latitude raster
    :param ori_sim: output path for the merged two-band image
    """
    lon_arr = self.get_data(lon)
    lat_arr = self.get_data(lat)
    stacked = np.stack((lon_arr, lat_arr)).astype(float)
    self.write_img(ori_sim, '', [0.0, 1.0, 0.0, 0.0, 0.0, 1.0], stacked, '0')
def get_scopes(self, ori_sim):
    """
    Compute the geographic corner coordinates of a two-band lon/lat image,
    ignoring zero and NaN pixels.

    :param ori_sim: path of a two-band image (band 0 = lon, band 1 = lat)
    :return: [[min_lon, max_lat], [max_lon, max_lat],
              [min_lon, min_lat], [max_lon, min_lat]]  (UL, UR, LL, LR)
    """
    data = self.get_data(ori_sim)
    lon = data[0, :, :]
    lat = data[1, :, :]

    def _valid_minmax(arr):
        # Treat zeros and NaNs as invalid when searching for extremes.
        valid = (arr != 0) & ~np.isnan(arr)
        return (np.nanmin(np.where(valid, arr, np.inf)),
                np.nanmax(np.where(valid, arr, -np.inf)))

    min_lon, max_lon = _valid_minmax(lon)
    min_lat, max_lat = _valid_minmax(lat)
    return [[min_lon, max_lat], [max_lon, max_lat],
            [min_lon, min_lat], [max_lon, min_lat]]
@staticmethod
def get_pixel_value(file_path, lon, lat):
    """
    Sample every band of a raster at one geographic coordinate.

    FIX: declared as @staticmethod — the method takes no self, and callers
    invoke it as ImageHandler.get_pixel_value(path, lon, lat); without the
    decorator an instance call would silently bind the instance as file_path.

    :param file_path: raster file path
    :param lon: x / longitude coordinate in the raster's reference system
    :param lat: y / latitude coordinate in the raster's reference system
    :return: list of per-band pixel values, or None if the file cannot be opened
    """
    dataset = gdal.Open(file_path, gdal.GA_ReadOnly)
    if dataset is None:
        print("File don't open : {}".format(file_path))
        return None
    # Invert the geotransform so geographic coordinates map to pixel indices.
    geotransform = dataset.GetGeoTransform()
    inv_geotransform = gdal.InvGeoTransform(geotransform)
    x, y = gdal.ApplyGeoTransform(inv_geotransform, lon, lat)
    num_bands = dataset.RasterCount
    pixel_values = []
    # Read a single pixel from each band at the computed location.
    for i in range(1, num_bands + 1):
        band = dataset.GetRasterBand(i)
        value = band.ReadAsArray(int(x), int(y), 1, 1)[0, 0]
        pixel_values.append(value)
    dataset = None  # close the dataset
    return pixel_values
# if __name__ == '__main__':
# path = r'D:\BaiduNetdiskDownload\GZ\lon.rdr'
# path2 = r'D:\BaiduNetdiskDownload\GZ\lat.rdr'
# path3 = r'D:\BaiduNetdiskDownload\GZ\lon_lat.tif'
# s = ImageHandler().band_merge(path, path2, path3)
# print(s)
# pass

View File

@ -10,8 +10,12 @@ import numpy as np
import shelve
import isce
import isceobj
from mroipac.baseline.Baseline import Baseline
# import matplotlib
# matplotlib.use('Agg')
# import matplotlib.dates as mdates
# import matplotlib.pyplot as plt
from mroipac.baseline.Baseline import Baseline # 源代码
# from components.mroipac.baseline.Baseline import Baseline
filtStrength = '0.8'
noMCF = 'False'
@ -41,6 +45,7 @@ class config(object):
self.f.write('cropFrame : ' + '\n')
self.f.write('input : ' + self.inputDir + '\n')
self.f.write('box_str : ' + self.bbox + '\n')
self.f.write('dem_str : ' + self.demPath + '\n')
self.f.write('output : ' + self.cropOutputDir + '\n')
##For booleans, just having an entry makes it True
@ -314,6 +319,7 @@ class run(object):
configObj.inputDir = os.path.join(self.fullFrameSlcDir, d)
configObj.cropOutputDir = os.path.join(self.slcDir, d)
configObj.bbox = self.bbox
configObj.demPath=self.dem
configObj.nativeDoppler = native
configObj.israw = israw
configObj.cropFrame('[Function-1]')
@ -552,7 +558,7 @@ class run(object):
def secondarys_fine_resampleSlc(self, stackReference, secondaryDates, config_prefix, split=False):
# copy over the reference into the final SLC folder as well
self.runf.write(self.text_cmd + ' referenceStackCopy.py -i ' +
self.runf.write(self.text_cmd + 'referenceStackCopy.py -i ' +
os.path.join(self.slcDir,
stackReference,
stackReference + self.raw_string + '.slc') + ' -o ' +
@ -702,8 +708,10 @@ def baselinePair(baselineDir, reference, secondary,doBaselines=True):
bObj = Baseline()
bObj.configure()
bObj.wireInputPort(name='referenceFrame', object=mFrame)
bObj.wireInputPort(name='secondaryFrame', object=sFrame)
# bObj.wireInputPort(name='referenceFrame', object=mFrame) # 原始代码
# bObj.wireInputPort(name='secondaryFrame', object=sFrame)
bObj.addReferenceFrame_new(mFrame)
bObj.addSecondaryFrame_new(sFrame)
bObj.baseline() # calculate baseline from orbits
pBaselineBottom = bObj.pBaselineBottom
pBaselineTop = bObj.pBaselineTop
@ -754,60 +762,57 @@ def selectPairs(inps,stackReference, secondaryDates, acuisitionDates,doBaselines
if (db < inps.dbThr) and (dt < inps.dtThr):
pairs.append((acuisitionDates[i],acuisitionDates[j]))
plotNetwork(baselineDict, timeDict, pairs,os.path.join(inps.workDir,'pairs.pdf'))
# plotNetwork(baselineDict, timeDict, pairs,os.path.join(inps.workDir,'pairs.pdf'))
return pairs
def plotNetwork(baselineDict, timeDict, pairs,save_name='pairs.png'):
import matplotlib
matplotlib.use('Agg')
import matplotlib.dates as mdates
import matplotlib.pyplot as plt
datefmt='%Y%m%d'
fig1 = plt.figure(1)
ax1=fig1.add_subplot(111)
# def plotNetwork(baselineDict, timeDict, pairs,save_name='pairs.png'):
# datefmt='%Y%m%d'
# fig1 = plt.figure(1)
# ax1=fig1.add_subplot(111)
ax1.cla()
for ni in range(len(pairs)):
# ax1.plot(np.array([timeDict[pairs[ni][0]].days,timeDict[pairs[ni][1]].days]),
ax1.plot([datetime.datetime.strptime(pairs[ni][0],datefmt),
datetime.datetime.strptime(pairs[ni][1], datefmt)],
np.array([baselineDict[pairs[ni][0]],
baselineDict[pairs[ni][1]]]),
'-ko',lw=1, ms=4, alpha=0.7, mfc='r')
# ax1.cla()
# for ni in range(len(pairs)):
# # ax1.plot(np.array([timeDict[pairs[ni][0]].days,timeDict[pairs[ni][1]].days]),
# ax1.plot([datetime.datetime.strptime(pairs[ni][0],datefmt),
# datetime.datetime.strptime(pairs[ni][1], datefmt)],
# np.array([baselineDict[pairs[ni][0]],
# baselineDict[pairs[ni][1]]]),
# '-ko',lw=1, ms=4, alpha=0.7, mfc='r')
myFmt = mdates.DateFormatter('%Y-%m')
ax1.xaxis.set_major_formatter(myFmt)
# myFmt = mdates.DateFormatter('%Y-%m')
# ax1.xaxis.set_major_formatter(myFmt)
plt.title('Baseline plot')
plt.xlabel('Time')
plt.ylabel('Perp. Baseline')
plt.tight_layout()
# plt.title('Baseline plot')
# plt.xlabel('Time')
# plt.ylabel('Perp. Baseline')
# plt.tight_layout()
plt.savefig(save_name)
# plt.savefig(save_name)
###Check degree of each SLC
datelist = [k for k,v in list(timeDict.items())]
connMat = np.zeros((len(pairs), len(timeDict)))
for ni in range(len(pairs)):
connMat[ni, datelist.index(pairs[ni][0])] = 1.0
connMat[ni, datelist.index(pairs[ni][1])] = -1.0
# ###Check degree of each SLC
# datelist = [k for k,v in list(timeDict.items())]
# connMat = np.zeros((len(pairs), len(timeDict)))
# for ni in range(len(pairs)):
# connMat[ni, datelist.index(pairs[ni][0])] = 1.0
# connMat[ni, datelist.index(pairs[ni][1])] = -1.0
slcSum = np.sum( np.abs(connMat), axis=0)
minDegree = np.min(slcSum)
# slcSum = np.sum( np.abs(connMat), axis=0)
# minDegree = np.min(slcSum)
print('##################')
print('SLCs with min degree connection of {0}'.format(minDegree))
for ii in range(slcSum.size):
if slcSum[ii] == minDegree:
print(datelist[ii])
print('##################')
# print('##################')
# print('SLCs with min degree connection of {0}'.format(minDegree))
# for ii in range(slcSum.size):
# if slcSum[ii] == minDegree:
# print(datelist[ii])
# print('##################')
if np.linalg.matrix_rank(connMat) != (len(timeDict) - 1):
raise Exception('The network for cascading coregistration is not connected')
# if np.linalg.matrix_rank(connMat) != (len(timeDict) - 1):
# raise Exception('The network for cascading coregistration is not connected')
def writeJobFile(runFile):

View File

@ -12,6 +12,12 @@ import mroipac
import os
import shelve
import filecmp
from isceobj.Planet.Planet import Planet
from isceobj.Util.Poly2D import Poly2D
from osgeo import gdal
from osgeo import gdalconst
import numpy as np
from isceobj.Orbit.Orbit import Orbit
def createParser():
parser = argparse.ArgumentParser( description='Generate a baseline grid for interferograms')
@ -34,7 +40,7 @@ def cmdLineParse(iargs = None):
def getMergedOrbit(product):
from isceobj.Orbit.Orbit import Orbit
###Create merged orbit
orb = Orbit()
@ -43,7 +49,9 @@ def getMergedOrbit(product):
#Add first orbit to begin with
for sv in product.orbit:
orb.addStateVector(sv)
orb.setsessionMode(product.orbit.sessionMode)
orb.setPolyParams(product.orbit.polynum,product.orbit.oribitStartTime,product.orbit.A_arr)
orb.setDoppler(product.orbit.refrenceTime,product.orbit.dopperPoly)
return orb
@ -51,15 +59,13 @@ def main(iargs=None):
'''Compute baseline.
'''
inps=cmdLineParse(iargs)
from isceobj.Planet.Planet import Planet
from isceobj.Util.Poly2D import Poly2D
import numpy as np
import shelve
baselineDir = os.path.dirname(inps.baselineFile)
if baselineDir != '':
os.makedirs(baselineDir, exist_ok=True)
with shelve.open(os.path.join(inps.reference, 'data'), flag='r') as mdb:
reference = mdb['frame']
@ -67,6 +73,8 @@ def main(iargs=None):
with shelve.open(os.path.join(inps.secondary, 'data'), flag='r') as mdb:
secondary = mdb['frame']
isLT1AB=reference._processingSystem == 'LT1AB'
# check if the reference and secondary shelf are the same, i.e. it is baseline grid for the reference
reference_SensingStart = reference.getSensingStart()
secondary_SensingStart = secondary.getSensingStart()
@ -100,8 +108,20 @@ def main(iargs=None):
azimuthLimits = (mSensingStop - mSensingStart).total_seconds()
nAzimuth = int(np.max([30,int(np.ceil(azimuthLimits))]))
# print("mFarRange", mFarRange)
# print("mStartingRange", mStartingRange)
# print("mSensingStart", mSensingStart)
# print("mSensingStop", mSensingStop)
# print("nPixels", nPixels)
# print("nLines", nLines)
# print("dr", dr)
# print("rangeLimits", rangeLimits)
# print("nRange", nRange)
# print("slantRange", slantRange)
# print("azimuthLimits", azimuthLimits)
# print("nAzimuth", nAzimuth)
azimuthTime = [mSensingStart + datetime.timedelta(seconds= x * azimuthLimits/(nAzimuth-1.0)) for x in range(nAzimuth)]
# print("azimuthTime", azimuthTime)
doppler = Poly2D()
doppler.initPoly(azimuthOrder=0, rangeOrder=0, coeffs=[[0.]])
@ -116,7 +136,8 @@ def main(iargs=None):
Bperp.tofile(fid)
else:
for ii, taz in enumerate(azimuthTime):
# print("ii", ii)
# print("taz", taz)
referenceSV = mOrb.interpolate(taz, method='hermite')
mxyz = np.array(referenceSV.getPosition())
mvel = np.array(referenceSV.getVelocity())
@ -126,7 +147,7 @@ def main(iargs=None):
target = mOrb.rdr2geo(taz, rng)
targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist())
slvTime, slvrng = sOrb.geo2rdr(target, doppler=doppler, wvl=0)
slvTime, slvrng = sOrb.geo2rdr(target, doppler=doppler, wvl=0)
secondarySV = sOrb.interpolateOrbit(slvTime, method='hermite')
@ -158,11 +179,15 @@ def main(iargs=None):
###Create oversampled VRT file
cmd = 'gdal_translate -of VRT -ot Float32 -r bilinear -outsize {xsize} {ysize} {infile}.vrt {infile}.full.vrt'.format(xsize=nPixels, ysize=nLines, infile=inps.baselineFile)
translate_options = gdal.TranslateOptions(format='VRT',outputType=gdalconst.GDT_Float64, resampleAlg='bilinear', width=nPixels, height=nLines)
gdal.Translate(inps.baselineFile + ".full.vrt", inps.baselineFile + '.vrt', options=translate_options)
status = os.system(cmd)
if status:
raise Exception('cmd: {0} Failed'.format(cmd))
# cmd = 'gdal_translate -of VRT -ot Float32 -r bilinear -outsize {xsize} {ysize} {infile}.vrt {infile}.full.vrt'.format(xsize=nPixels, ysize=nLines, infile=inps.baselineFile)
# status = os.system(cmd)
# if status:
# raise Exception('cmd: {0} Failed'.format(cmd))
if __name__ == '__main__':
'''

View File

@ -5,12 +5,17 @@
import os
import argparse
import configparser
from tkinter.messagebox import NO
import numpy as np
import isce
import isceobj
from iscesys.DataManager import createManager
from contrib.demUtils.SWBDStitcher import SWBDStitcher
### 扩展现有的方法
from osgeo import gdal
###
EXAMPLE = """example:
createWaterMask.py -b 31 33 130 132
@ -79,6 +84,11 @@ def dem2bbox(dem_file):
return bbox
def copydem_watermask(dem,out_water):
pass
def download_waterMask(bbox, dem_file, fill_value=-1):
out_dir = os.getcwd()
# update out_dir and/or bbox if dem_file is input
@ -92,13 +102,39 @@ def download_waterMask(bbox, dem_file, fill_value=-1):
sw.configure()
#inps.waterBodyGeo = sw.defaultName(inps.bbox)
sw.outputFile = os.path.join(out_dir, sw.defaultName(bbox))
# 创建waterMask ---- 代码废弃
# dem=gdal.Open(dem_file) # 根据输入dem 创建wbd. [0,255] 0: 不是水域255 水域
# row_count = dem.RasterYSize # 读取图像的行数
# col_count = dem.RasterXSize # 读取图像的列数
# im_proj = dem.GetProjection()
# im_geotrans = list(dem.GetGeoTransform())
# gt = im_geotrans # 坐标变换参数
# gtiff_driver = gdal.GetDriverByName('GTiff')
# swdb=gtiff_driver.Create(sw.outputFile,col_count,row_count,1,gdal.GDT_Byte)
# swdb.SetGeoTransform(im_geotrans) # 写入仿射变换参数
# swdb.SetProjection(im_proj) # 写入投影
# # 写入tiff
# sbd=np.zeros((row_count,col_count),dtype=np.uint8)
# swdb.GetRasterBand(1).WriteArray(sbd)
# swdb.FlushCache()
# del swdb # 删除影像
print("=================================")
print("isce2/contrib/stack/stripmapStack/createWaterMask.py","代码已经修改")
print("================================")
if os.path.isfile(sw.outputFile):
print('wbd file already exists at: {}'.format(sw.outputFile))
print('skip re-downloading and continue')
return None
else:
print("========================")
print("输出结果None")
print("==========================")
return None
sw._noFilling = False
sw._fillingValue = fill_value
sw.stitch(bbox[0:2], bbox[2:])
sw.stitch(bbox[0:2], bbox[2:]) # 下载对应的影像
return sw.outputFile
@ -109,17 +145,27 @@ def geo2radar(geo_file, rdr_file, lat_file, lon_file):
sw.toRadar(geo_file, lat_file, lon_file, rdr_file)
return rdr_file
def geo2radarUsr(rdr_file, lat_file, lon_file):
#inps.waterBodyRadar = inps.waterBodyGeo + '.rdr'
print('converting water mask file to radar coordinates ...')
sw = SWBDStitcher()
sw.toRadarNone( lat_file, lon_file, rdr_file)
return rdr_file
#looks.py -i watermask.msk -r 4 -a 14 -o 'waterMask.14alks_4rlks.msk'
#imageMath.py -e='a*b' --a=filt_20100911_20101027.int --b=watermask.14alks_4rlks.msk -o filt_20100911_20101027_masked.int -t cfloat -s BIL
def main(iargs=None):
print("============================")
print("当前影像")
print("================================")
inps = cmdLineParse(iargs)
geo_file = download_waterMask(inps.bbox, inps.demName, inps.fillValue)
if inps.latName and inps.lonName:
geo2radar(geo_file, inps.outfile, inps.latName, inps.lonName)
if geo_file is None:
geo2radarUsr( inps.outfile, inps.latName, inps.lonName)
else:
geo2radar(geo_file, inps.outfile, inps.latName, inps.lonName)
return
@ -127,7 +173,7 @@ if __name__ == '__main__' :
'''
creates a water mask and transforms to radar Coord
'''
main()
main()

View File

@ -9,9 +9,14 @@ import argparse
import datetime
import os
from isceobj.Util.decorators import use_api
# from isce.components.isceobj.Util.decorators import use_api
from imageMath import IML
from isceobj.Orbit import Orbit
from isceobj.Util.Poly2D import Poly2D
from isceobj.Planet.Planet import Planet
from isceobj.Constants import SPEED_OF_LIGHT
import logging
from ImageHandle import ImageHandler
def createParser():
'''
@ -24,6 +29,7 @@ def createParser():
help='Bbox (SNWE in degrees)')
parser.add_argument('-B', '--box_str' ,dest='bbox_str', type=str, default=None,
help='Bbox (SNWE in degrees)')
parser.add_argument("-D","--dem_str",dest="demPath",type=str,required=True,help='DEM Path')
parser.add_argument('-o', '--output', dest='outdir', type=str, required=True,
help='Output directory for the cropped raw / slc file')
parser.add_argument('-r', '--raw', dest='israw', action='store_true', default=False,
@ -44,15 +50,10 @@ def cmdLineParse(iargs = None):
return inps
#####Helper functions for geobox manipulation
def geoboxToAzrgbox(frame, geobox, israw=False, isnative=False, margin=0.02):
def geoboxToAzrgbox(frame, geobox,demPath, israw=False, isnative=False, margin=0.02):
'''
Convert a geo bounding box - SNWE to pixel limits.
'''
from isceobj.Util.Poly2D import Poly2D
from isceobj.Planet.Planet import Planet
from isceobj.Constants import SPEED_OF_LIGHT
rgs = []
azs = []
zrange = [-500. , 9000.]
@ -82,26 +83,55 @@ def geoboxToAzrgbox(frame, geobox, israw=False, isnative=False, margin=0.02):
doppler.initPoly(azimuthOrder=0, rangeOrder=len(coeff)-1, coeffs=[coeff])
else:
###Zero doppler system
print('FALSE__________________________________')
doppler = Poly2D()
doppler.initPoly(azimuthOrder=0, rangeOrder=0, coeffs=[[0.]])
print("session,",frame.getProcessingSystem())
session=frame.getProcessingSystem() # 数据
####Do
for z in zrange:
for combo in combos:
try:
taz, rgm = frame.orbit.geo2rdr(combo + [z], side=lookSide,
doppler=doppler, wvl=wvl)
azs.append(taz)
rgs.append(rgm)
except:
pass
for combo in combos:
try:
z=ImageHandler.get_pixel_value(demPath,combo[1],combo[0])[0]
llh = combo + [z]
taz, rgm = frame.orbit.geo2rdr(combo + [z], side=lookSide,
doppler=doppler, wvl=wvl)
azs.append(taz)
rgs.append(rgm)
except Exception as e:
pass
# print("="*20)
# print("azs",azs)
# print("rgs",rgs)
# print("="*20)
if len(azs) <= 1:
raise Exception('Could not map geobbox coordinates to image')
azrgbox = [np.min(azs), np.max(azs), np.min(rgs), np.max(rgs)]
####sensing start
temp1 = azrgbox[0]
temp2 = frame.sensingStart
temp = (azrgbox[0] - frame.sensingStart).total_seconds()
ymin = np.floor( (azrgbox[0] - frame.sensingStart).total_seconds() * frame.PRF)
print('Line start: ', ymin)
ymin = np.int32( np.clip(ymin, 0, frame.numberOfLines-1))
####sensing stop
ymax = np.ceil( (azrgbox[1] - frame.sensingStart).total_seconds() * frame.PRF) + 1
print('Line stop: ', ymax)
ymax = np.int32( np.clip(ymax, 1, frame.numberOfLines))
print('Line limits: ', ymin, ymax)
print('Original Line Limits: ', 0, frame.numberOfLines)
if israw:
####If cropping raw product, need to add an aperture length in range and azimuth
@ -159,15 +189,18 @@ def cropFrame(frame, limits, outdir, israw=False):
factor = 1
####sensing start
temp1 = limits[0]
temp2 = frame.sensingStart
temp = (limits[0] - frame.sensingStart).total_seconds()
ymin = np.floor( (limits[0] - frame.sensingStart).total_seconds() * frame.PRF)
print('Line start: ', ymin)
ymin = int( np.clip(ymin, 0, frame.numberOfLines-1))
ymin = np.int32( np.clip(ymin, 0, frame.numberOfLines-1))
####sensing stop
ymax = np.ceil( (limits[1] - frame.sensingStart).total_seconds() * frame.PRF) + 1
print('Line stop: ', ymax)
ymax = int( np.clip(ymax, 1, frame.numberOfLines))
ymax = np.int32( np.clip(ymax, 1, frame.numberOfLines))
print('Line limits: ', ymin, ymax)
print('Original Line Limits: ', 0, frame.numberOfLines)
@ -185,13 +218,13 @@ def cropFrame(frame, limits, outdir, israw=False):
####starting range
xmin = np.floor( (limits[2] - frame.startingRange)/frame.instrument.rangePixelSize)
print('Pixel start: ', xmin)
xmin = int(np.clip(xmin, 0, (frame.image.width//factor)-1))
xmin = np.int32(np.clip(xmin, 0, (frame.image.width//factor)-1))
####far range
xmax = np.ceil( (limits[3] - frame.startingRange)/frame.instrument.rangePixelSize)+1
print('Pixel stop: ', xmax)
xmax = int(np.clip(xmax, 1, frame.image.width//factor))
xmax = np.int32(np.clip(xmax, 1, frame.image.width//factor))
print('Pixel limits: ', xmin, xmax)
print('Original Pixel Limits: ', 0, frame.image.width//factor)
@ -280,9 +313,8 @@ def main(iargs=None):
else:
frame = slcFrame
#####Determine azimuth and range limits
limits = geoboxToAzrgbox(frame, inps.bbox,
limits = geoboxToAzrgbox(frame, inps.bbox, inps.demPath,
israw=inps.israw, isnative=inps.isnative)

View File

@ -0,0 +1,32 @@
#!/usr/bin/env python3
########################
#Author: Heresh Fattahi
#######################
import os, glob , sys
import symtable
import shelve
import numpy
import numpy.matlib
import statistics
#以下是当前目录的py文件
import baselineGrid
import denseOffsets
import geo2rdr
import geocodeGdal
import geocode
import invertMisreg
import createWaterMask
import cropFrame
import refineSecondaryTiming
import uncompressFile
import FilterAndCoherence
import resampleSlc
import crossmul
import Stack
import topo
import unwrap

View File

@ -0,0 +1,192 @@
#!/usr/bin/env python3
# David Bekaert
import os
import glob
import argparse
from uncompressFile import uncompressfile
import shutil
import xml.etree.ElementTree as etree
def createParser():
    '''
    Build and return the argparse parser for the GF3 SLC preparation script.
    '''
    parser = argparse.ArgumentParser(description='Prepare GF3 SLC processing (unzip/untar files, organize in date folders, generate script to unpack into isce formats). For now, it cannot merge multiple scenes')
    # Input directory holding the downloaded GF3 archives.
    parser.add_argument('-i', '--input', dest='input', type=str, required=False, help='directory with the slc data')
    # Whether to delete archives after unpacking (default: keep in archive folder).
    parser.add_argument('-rmfile', '--rmfile', dest='rmfile', action='store_true', default=False, help='Optional: remove zip/tar/compressed files after unpacking into date structure (default is to keep in archive folder)')
    # Destination for the ISCE-format unpacking (run-file generation).
    parser.add_argument('-o', '--output', dest='output', type=str, required=False, help='output directory where data needs to be unpacked into isce format (for script generation).')
    # Prefix prepended to each generated run-file line.
    parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;', help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;')
    return parser
def cmdLineParse(iargs=None):
    '''
    Parse the command line arguments (uses sys.argv when iargs is None).
    '''
    return createParser().parse_args(args=iargs)
def get_Date(RSAT2folder):
    """
    Extract the acquisition date from a GF3 product folder.

    Looks for a '*.meta.xml' file in the folder and reads the
    imageinfo/imagingTime/start element, returning the date as YYYYMMDD.

    :param RSAT2folder: path to an unpacked product folder
    :return: (successflag, acquisitionDate); acquisitionDate is 'FAIL'
             when no valid date could be extracted
    """
    # will search for different version of workreport to be compatible with ASF, WInSAR etc
    RSAT2file = glob.glob(os.path.join(RSAT2folder, '*.meta.xml'))
    if len(RSAT2file) > 0:
        RSAT2file = RSAT2file[0]
        # loading the date information from the product.xml file
        tree = etree.parse(RSAT2file)
        root = tree.getroot()
        # FIX: guard each lookup — the unguarded find() chain raised
        # AttributeError on malformed XML instead of reporting failure.
        imageinfo = root.find('imageinfo')
        imaging_time = imageinfo.find('imagingTime') if imageinfo is not None else None
        start = imaging_time.find('start') if imaging_time is not None else None
        image_time = start.text if start is not None else None
        if image_time is not None:
            # '2023-03-27 ...' -> '20230327'
            acquisitionDate = image_time[0:4] + image_time[5:7] + image_time[8:10]
            if len(acquisitionDate) == 8:
                return True, acquisitionDate
    # if it reached here it could not find the acquisitionDate
    return False, 'FAIL'
def main(iargs=None):
    '''
    The main driver: unpack GF3 archives, organize them into per-date
    folders, and write a run file of unpackFrame commands.
    '''
    inps = cmdLineParse(iargs)

    # parsing required inputs
    inputDir = os.path.abspath(inps.input)
    # parsing optional inputs
    if inps.output:
        outputDir = os.path.abspath(inps.output)
    else:
        outputDir = None
    rmfile = inps.rmfile

    # inputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/download/'
    # inputDir = os.path.abspath(inputDirs)
    # outputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/SLC'
    # outputDir = os.path.abspath(outputDirs)
    # rmfile = False

    # filename of the runfile
    run_unPack = os.path.join(inputDir, 'run_unPackGF3.txt')

    # loop over the different folder, RSAT2 zip/tar files and unzip them, make the names consistent
    RSAT2_extensions = (os.path.join(inputDir, 'GF3*.zip'),os.path.join(inputDir, 'GF3*.tar'),os.path.join(inputDir, 'GF3*.gz'))
    for RSAT2_extension in RSAT2_extensions:
        RSAT2_filesfolders = glob.glob(RSAT2_extension)
        for RSAT2_infilefolder in RSAT2_filesfolders:
            ## the path to the folder/zip
            workdir = os.path.dirname(RSAT2_infilefolder)
            ## get the output name folder without any extensions
            temp = os.path.basename(RSAT2_infilefolder)
            # trim the extensions and keep only very first part
            parts = temp.split(".tar.gz")
            parts = parts[0].split('-')
            RSAT2_outfolder = parts[0]
            # add the path back in
            RSAT2_outfolder = os.path.join(workdir,RSAT2_outfolder)

            # loop over two cases (either file or folder):
            ### this is a file, try to unzip/untar it
            if os.path.isfile(RSAT2_infilefolder):
                # unzip the file in the outfolder
                successflag_unzip = uncompressfile(RSAT2_infilefolder,RSAT2_outfolder)

                # put failed files in a seperate directory
                if not successflag_unzip:
                    os.makedirs(os.path.join(workdir,'FAILED_FILES'), exist_ok=True)
                    os.rename(RSAT2_infilefolder,os.path.join(workdir,'FAILED_FILES','.'))
                else:
                    # check if file needs to be removed or put in archive folder
                    if rmfile:
                        os.remove(RSAT2_infilefolder)
                        # print('Deleting: ' + RSAT2_infilefolder)
                    else:
                        os.makedirs(os.path.join(workdir,'ARCHIVED_FILES'), exist_ok=True)
                        # cmd = 'mv ' + RSAT2_infilefolder + ' ' + os.path.join(workdir,'ARCHIVED_FILES','.')
                        # os.system(cmd)
                        shutil.move(RSAT2_infilefolder, os.path.join(workdir,'ARCHIVED_FILES','.'))

    # loop over the different RSAT2 folders and make sure the folder names are consistent.
    # this step is not needed unless the user has manually unzipped data before.
    RSAT2_folders = glob.glob(os.path.join(inputDir, 'GF3*'))
    for RSAT2_folder in RSAT2_folders:
        # in case the user has already unzipped some files, make sure they are unzipped similar like the uncompressfile code
        temp = os.path.basename(RSAT2_folder)
        parts = temp.split(".tar.gz")
        parts = parts[0].split('-')
        RSAT2_outfolder_temp = parts[0]
        RSAT2_outfolder_temp = os.path.join(os.path.dirname(RSAT2_folder),RSAT2_outfolder_temp)
        # check if the folder (RSAT2_folder) has a different filename as generated from the uncompressFile code (RSAT2_outfolder_temp)
        if not (RSAT2_outfolder_temp == RSAT2_folder):
            # it is different, check if the RSAT2_outfolder_temp already exists, if yes, delete the current folder
            if os.path.isdir(RSAT2_outfolder_temp):
                # print('Remove ' + RSAT2_folder + ' as ' + RSAT2_outfolder_temp + ' exists...')
                # check if this folder already exist, if so overwrite it
                shutil.rmtree(RSAT2_folder)

    # loop over the different RSAT2 folders and organize in date folders
    RSAT2_folders = glob.glob(os.path.join(inputDir, 'GF3*'))
    for RSAT2_folder in RSAT2_folders:
        # get the date
        successflag, imgDate = get_Date(RSAT2_folder)

        workdir = os.path.dirname(RSAT2_folder)
        if successflag:
            # move the file into the date folder
            SLC_dir = os.path.join(workdir,imgDate,'')
            if os.path.isdir(SLC_dir):
                # an existing date folder is overwritten
                shutil.rmtree(SLC_dir)
            # cmd = 'mv ' + RSAT2_folder + ' ' + SLC_dir
            # os.system(cmd)
            shutil.move(RSAT2_folder, SLC_dir)
            print ('Succes: ' + imgDate)
        else:
            print('Failed: ' + RSAT2_folder)

    # now generate the unpacking script for all the date dirs
    dateDirs = glob.glob(os.path.join(inputDir,'2*'))
    if outputDir is not None:
        f = open(run_unPack,'w')
        for dateDir in dateDirs:
            RSAT2Files = glob.glob(os.path.join(dateDir, 'GF3*.tiff'))
            if len(RSAT2Files) <= 0:
                RSAT2Files = glob.glob(os.path.join(dateDir, 'GF3*.tif'))
            if len(RSAT2Files)>0:
                acquisitionDate = os.path.basename(dateDir)
                slcDir = os.path.join(outputDir, acquisitionDate)
                os.makedirs(slcDir, exist_ok=True)
                # NOTE(review): this executes unpackFrame_GF3.exe synchronously
                # for every date in addition to writing the run file — confirm
                # the immediate execution is intended.
                cmd = 'unpackFrame_GF3.exe -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
                result = os.system(cmd)
                # f.write(inps.text_cmd + cmd+'\n')
                print(cmd, result)
                f.write(cmd+'\n')
        f.close()

if __name__ == '__main__':
    main()

View File

@ -0,0 +1,211 @@
#!/usr/bin/env python3
# David Bekaert
import os
import glob
import argparse
from uncompressFile import uncompressfile
import shutil
import xml.etree.ElementTree as etree
import unpackFrame_LT1AB
def createParser():
    """Build the command-line parser for LT1AB SLC preparation."""
    p = argparse.ArgumentParser(
        description='Prepare LT1AB SLC processing (unzip/untar files, organize in date folders, generate script to unpack into isce formats). For now, it cannot merge multiple scenes')
    p.add_argument('-i', '--input', dest='input', type=str, required=False,
                   help='directory with the slc data')
    p.add_argument('-rmfile', '--rmfile', dest='rmfile', action='store_true', default=False,
                   help='Optional: remove zip/tar/compressed files after unpacking into date structure (default is to keep in archive folder)')
    p.add_argument('-o', '--output', dest='output', type=str, required=False,
                   help='output directory where data needs to be unpacked into isce format (for script generation).')
    # NOTE(review): action='store_true' combined with default=True means
    # args.linux is always True and the flag has no effect -- confirm whether
    # the default was meant to be False.
    p.add_argument('--linux', dest="linux", action='store_true', default=True,
                   help='run in linux')
    p.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, required=False,
                   default='source ~/.bash_profile;',
                   help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;')
    return p
def cmdLineParse(iargs=None):
    """Parse *iargs* (or sys.argv when None) into an argparse namespace."""
    return createParser().parse_args(args=iargs)
def get_Date(LT1ABfolder):
    """Extract the acquisition date from an LT1AB product folder.

    Looks for a '*.meta.xml' metadata file inside *LT1ABfolder* and reads the
    productInfo/sceneInfo/start/timeUTC element (ISO format
    YYYY-MM-DDTHH:MM:SS...).

    Parameters
    ----------
    LT1ABfolder : str
        Path to an unpacked LT1AB product directory.

    Returns
    -------
    (successflag, acquisitionDate) : (bool, str)
        (True, 'YYYYMMDD') when an 8-digit date could be extracted,
        (False, 'FAIL') otherwise.
    """
    metafiles = glob.glob(os.path.join(LT1ABfolder, '*.meta.xml'))
    if len(metafiles) > 0:
        tree = etree.parse(metafiles[0])
        root = tree.getroot()
        # Use a single path lookup; returns None if any element is missing,
        # instead of raising AttributeError on a chained .find() call.
        node = root.find('productInfo/sceneInfo/start/timeUTC')
        if node is not None and node.text is not None:
            image_time = node.text
            # ISO timestamp: chars 0-3 year, 5-6 month, 8-9 day.
            acquisitionDate = image_time[0:4] + image_time[5:7] + image_time[8:10]
            if len(acquisitionDate) == 8:
                return True, acquisitionDate
    # If it reached here it could not find the acquisition date.
    return False, 'FAIL'
def main(iargs=None):
    '''
    The main driver: organize LT1AB deliveries into per-date folders and
    unpack them into ISCE format.

    Steps:
      1. Unzip/untar LT1*.zip/.tar/.gz archives found in the input directory
         (failures go to FAILED_FILES; successful archives are removed or
         moved to ARCHIVED_FILES).
      2. Normalize folder names of any manually unzipped products.
      3. Move each LT1* product folder into a YYYYMMDD date folder.
      4. Unpack each date folder: in-process via unpackFrame_LT1AB when
         --linux is set, otherwise by running unpackFrame_LT1AB.exe and
         logging the commands to a run file.
    '''
    inps = cmdLineParse(iargs)
    # parsing required inputs
    inputDir = os.path.abspath(inps.input)
    # parsing optional inputs
    if inps.output:
        outputDir = os.path.abspath(inps.output)
    else:
        outputDir = None
    rmfile = inps.rmfile
    # inputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/download/'
    # inputDir = os.path.abspath(inputDirs)
    # outputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/SLC'
    # outputDir = os.path.abspath(outputDirs)
    # rmfile = False
    # filename of the runfile
    run_unPack = os.path.join(inputDir, 'run_unPackLT1AB.txt')
    # loop over the different folder, LT1AB zip/tar files and unzip them, make the names consistent
    LT1AB_extensions = (os.path.join(inputDir, 'LT1*.zip'),os.path.join(inputDir, 'LT1*.tar'),os.path.join(inputDir, 'LT1*.gz'))
    for LT1AB_extension in LT1AB_extensions:
        LT1AB_filesfolders = glob.glob(LT1AB_extension)
        for LT1AB_infilefolder in LT1AB_filesfolders:
            ## the path to the folder/zip
            workdir = os.path.dirname(LT1AB_infilefolder)
            ## get the output name folder without any extensions
            temp = os.path.basename(LT1AB_infilefolder)
            # trim the extensions and keep only very first part
            parts = temp.split(".tar.gz")
            parts = parts[0].split('-')
            LT1AB_outfolder = parts[0]
            # add the path back in
            LT1AB_outfolder = os.path.join(workdir,LT1AB_outfolder)
            # loop over two cases (either file or folder):
            ### this is a file, try to unzip/untar it
            if os.path.isfile(LT1AB_infilefolder):
                # unzip the file in the outfolder
                successflag_unzip = uncompressfile(LT1AB_infilefolder,LT1AB_outfolder)
                # put failed files in a seperate directory
                if not successflag_unzip:
                    os.makedirs(os.path.join(workdir,'FAILED_FILES'), exist_ok=True)
                    # NOTE(review): os.rename onto a path ending in '.' fails on
                    # most platforms -- confirm this branch has been exercised.
                    os.rename(LT1AB_infilefolder,os.path.join(workdir,'FAILED_FILES','.'))
                else:
                    # check if file needs to be removed or put in archive folder
                    if rmfile:
                        os.remove(LT1AB_infilefolder)
                        # print('Deleting: ' + LT1AB_infilefolder)
                    else:
                        os.makedirs(os.path.join(workdir,'ARCHIVED_FILES'), exist_ok=True)
                        # cmd = 'mv ' + LT1AB_infilefolder + ' ' + os.path.join(workdir,'ARCHIVED_FILES','.')
                        # os.system(cmd)
                        shutil.move(LT1AB_infilefolder, os.path.join(workdir,'ARCHIVED_FILES','.'))
    # loop over the different LT1AB folders and make sure the folder names are consistent.
    # this step is not needed unless the user has manually unzipped data before.
    LT1AB_folders = glob.glob(os.path.join(inputDir, 'LT1*'))
    for LT1AB_folder in LT1AB_folders:
        # in case the user has already unzipped some files, make sure they are unzipped similar like the uncompressfile code
        temp = os.path.basename(LT1AB_folder)
        parts = temp.split(".tar.gz")
        parts = parts[0].split('-')
        LT1AB_outfolder_temp = parts[0]
        LT1AB_outfolder_temp = os.path.join(os.path.dirname(LT1AB_folder),LT1AB_outfolder_temp)
        # check if the folder (LT1AB_folder) has a different filename as generated from the uncompressFile code (LT1AB_outfolder_temp)
        if not (LT1AB_outfolder_temp == LT1AB_folder):
            # it is different, check if the LT1AB_outfolder_temp already exists, if yes, delete the current folder
            if os.path.isdir(LT1AB_outfolder_temp):
                # print('Remove ' + LT1AB_folder + ' as ' + LT1AB_outfolder_temp + ' exists...')
                # check if this folder already exist, if so overwrite it
                shutil.rmtree(LT1AB_folder)
    # loop over the different LT1AB folders and organize in date folders
    LT1AB_folders = glob.glob(os.path.join(inputDir, 'LT1*'))
    for LT1AB_folder in LT1AB_folders:
        # get the date
        successflag, imgDate = get_Date(LT1AB_folder)
        workdir = os.path.dirname(LT1AB_folder)
        if successflag:
            # move the file into the date folder
            SLC_dir = os.path.join(workdir,imgDate,'')
            if os.path.isdir(SLC_dir):
                # a date folder from a previous run is replaced wholesale
                shutil.rmtree(SLC_dir)
            # cmd = 'mv ' + LT1AB_folder + ' ' + SLC_dir
            # os.system(cmd)
            shutil.move(LT1AB_folder, SLC_dir)
            print ('Succes: ' + imgDate)
        else:
            print('Failed: ' + LT1AB_folder)
    if inps.linux:
        # unpack in-process via the unpackFrame_LT1AB module
        # now generate the unpacking script for all the date dirs
        dateDirs = glob.glob(os.path.join(inputDir,'2*'))
        if outputDir is not None:
            # NOTE(review): the run file is opened but never written in this
            # branch (unpacking happens in-process) -- confirm it is needed.
            f = open(run_unPack,'w')
            for dateDir in dateDirs:
                LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tiff'))
                if len(LT1ABFiles) <= 0:
                    # fall back to the .tif spelling of the extension
                    LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tif'))
                if len(LT1ABFiles)>0:
                    acquisitionDate = os.path.basename(dateDir)
                    slcDir = os.path.join(outputDir, acquisitionDate)
                    os.makedirs(slcDir, exist_ok=True)
                    print("unpackFrame_LT1AB ...")
                    unpackFrame_LT1AB.mainUnpackFrame(os.path.abspath(dateDir),slcDir)
                    print("unpackFrame_LT1AB finish!!!")
            f.close()
    else:
        # now generate the unpacking script for all the date dirs
        dateDirs = glob.glob(os.path.join(inputDir,'2*'))
        if outputDir is not None:
            f = open(run_unPack,'w')
            for dateDir in dateDirs:
                LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tiff'))
                if len(LT1ABFiles) <= 0:
                    # fall back to the .tif spelling of the extension
                    LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tif'))
                if len(LT1ABFiles)>0:
                    acquisitionDate = os.path.basename(dateDir)
                    slcDir = os.path.join(outputDir, acquisitionDate)
                    os.makedirs(slcDir, exist_ok=True)
                    # run the external unpacker and log the command to the run file
                    cmd = 'unpackFrame_LT1AB.exe -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
                    result = os.system(cmd)
                    # f.write(inps.text_cmd + cmd+'\n')
                    print(cmd, result)
                    f.write(cmd+'\n')
            f.close()
if __name__ == '__main__':
    # Script entry point: organize LT1AB deliveries and unpack them.
    main()

View File

@ -65,19 +65,24 @@ def main(iargs=None):
RSAT2_str2 = 'imagery_HH.tif' # RSAT2 extracted files
TSX_TDX_str = 'dims_op*' # TSX zip files
TSX_TDX_str2 = 'T*X*.xml' # TSX extracted files
GF3_str = 'GF3*'
GF3_str2 = 'GF3*.meta.xml'
# combine together
sensor_str_list = (ENV_str,ALOS1_str,CSK_str,CSK_str2,RSAT2_str,RSAT2_str2,TSX_TDX_str,TSX_TDX_str2)
sensor_list = ('Envisat','ALOS1','CSK','CSK','RSAT2','RSAT2','TSX/TDX','TSX/TDX')
sensor_unpackcommand = ('TODO','TODO','TODO','TODO','prepSlcRSAT2.py','prepSlcRSAT2.py','TODO','TODO')
sensor_str_list = (ENV_str,ALOS1_str,CSK_str,CSK_str2,RSAT2_str,RSAT2_str2,TSX_TDX_str,TSX_TDX_str2,GF3_str,GF3_str2)
sensor_list = ('Envisat','ALOS1','CSK','CSK','RSAT2','RSAT2','TSX/TDX','TSX/TDX','GF3','GF3')
sensor_unpackcommand = ('TODO','TODO','TODO','TODO','prepSlcRSAT2.py','prepSlcRSAT2.py','TODO','TODO','prepSlcGF3.exe','prepSlcGF3.exe')
Sensors = dict(zip(sensor_str_list,sensor_list))
Sensors_unpack = dict(zip(sensor_str_list,sensor_unpackcommand))
# Loop over the different sensor strings and try to find them
sensor_found = False
for sensor_str in Sensors:
for file in glob.iglob(os.path.join(inputDir,'**',sensor_str),recursive=True):
files = glob.iglob(os.path.join(inputDir,'**',sensor_str),recursive=True)
for file in files:
sensor_found = True
print(file)
sensor_str_keep = sensor_str
break

View File

@ -6,6 +6,9 @@ import argparse
import os
import shelve
import logging
import glob
import shutil
from osgeo import gdal
def createParser():
parser = argparse.ArgumentParser( description='Duplicating the reference SLC')
@ -47,15 +50,24 @@ def main(iargs=None):
os.makedirs(referenceShelveDir, exist_ok=True)
os.makedirs(secondaryShelveDir, exist_ok=True)
sec_files = glob.glob(os.path.join(inDir, 'data*'))
for file in sec_files:
shutil.copy(file, secondaryShelveDir)
# cmd = 'cp '+ inDir + '/data* ' + secondaryShelveDir
# os.system(cmd)
cmd = 'cp '+ inDir + '/data* ' + secondaryShelveDir
os.system(cmd)
cmd = 'cp '+ inDir + '/data* ' + referenceShelveDir
os.system(cmd)
ref_files = glob.glob(os.path.join(inDir, 'data*'))
for file in ref_files:
shutil.copy(file, referenceShelveDir)
# cmd = 'cp '+ inDir + '/data* ' + referenceShelveDir
# os.system(cmd)
translate_options = gdal.TranslateOptions(format='ENVI')
gdal.Translate(inps.output_slc, inps.input_slc, options=translate_options)
cmd = 'gdal_translate -of ENVI ' + inps.input_slc + " " + inps.output_slc
os.system(cmd)
translate_options = gdal.TranslateOptions(format='VRT')
gdal.Translate(inps.output_slc + ".vrt", inps.output_slc, options=translate_options)
cmd = 'gdal_translate -of VRT ' + inps.output_slc + " " + inps.output_slc + ".vrt"
os.system(cmd)

View File

@ -7,6 +7,8 @@ import isce
import isceobj
import shelve
import datetime
import shutil
import glob
from isceobj.Location.Offset import OffsetField
from iscesys.StdOEL.StdOELPy import create_writer
from mroipac.ampcor.Ampcor import Ampcor
@ -125,6 +127,7 @@ def estimateOffsetField(reference, secondary, azoffset=0, rgoffset=0):
sim.finalizeImage()
result = objOffset.getOffsetField()
return result
@ -186,15 +189,22 @@ def main(iargs=None):
referenceShelveDir = os.path.join(outDir, 'referenceShelve')
os.makedirs(referenceShelveDir, exist_ok=True)
cmd = 'cp ' + inps.metareference + '/data* ' + referenceShelveDir
os.system(cmd)
ref_files = glob.glob(os.path.join(inps.metareference, 'data*'))
for file in ref_files:
shutil.copy(file, referenceShelveDir)
# cmd = 'cp ' + inps.metareference + '/data* ' + referenceShelveDir
# os.system(cmd)
if inps.metasecondary is not None:
secondaryShelveDir = os.path.join(outDir, 'secondaryShelve')
os.makedirs(secondaryShelveDir, exist_ok=True)
cmd = 'cp ' + inps.metasecondary + '/data* ' + secondaryShelveDir
os.system(cmd)
sec_files = glob.glob(os.path.join(inps.metasecondary, 'data*'))
for file in sec_files:
shutil.copy(file, secondaryShelveDir)
#cmd = 'cp ' + inps.metasecondary + '/data* ' + secondaryShelveDir
#os.system(cmd)
rgratio = 1.0
azratio = 1.0

View File

@ -6,7 +6,8 @@ import shelve
import json
import logging
import numpy as np
import shutil
import glob
import isce
import isceobj
import stdproc
@ -156,7 +157,7 @@ def resampSecondary(burst, offdir, outname, doppler, azpoly, rgpoly,
rObj.referenceStartingRange = reference.startingRange
rObj.referenceSlantRangePixelSpacing = reference.getInstrument().getRangePixelSize()
rObj.referenceWavelength = reference.getInstrument().getRadarWavelength()
rObj.resamp_slc(imageOut=imgOut)
imgOut.renderHdr()
@ -178,13 +179,17 @@ def main(iargs=None):
os.makedirs(referenceShelveDir, exist_ok=True)
os.makedirs(secondaryShelveDir, exist_ok=True)
cmd = 'cp '+ inps.secondary + '/data* ' + secondaryShelveDir
print (cmd)
os.system(cmd)
cmd = 'cp '+ inps.reference + '/data* ' + referenceShelveDir
os.system(cmd)
sec_files = glob.glob(os.path.join(inps.secondary, 'data*'))
for file in sec_files:
shutil.copy(file, secondaryShelveDir)
# cmd = 'cp '+ inps.secondary + '/data* ' + secondaryShelveDir
# print (cmd)
# os.system(cmd)
ref_files = glob.glob(os.path.join(inps.reference, 'data*'))
for file in ref_files:
shutil.copy(file, referenceShelveDir)
# cmd = 'cp '+ inps.reference + '/data* ' + referenceShelveDir
# os.system(cmd)
# with shelve.open(os.path.join(inps.secondary, 'data'), flag='r') as sdb:
with shelve.open(os.path.join(secondaryShelveDir, 'data'), flag='r') as sdb:

View File

@ -10,15 +10,17 @@ import numpy as np
import shelve
# suppress matplotlib DEBUG message
from matplotlib.path import Path as Path
#from matplotlib.path import Path as Path
import logging
mpl_logger = logging.getLogger('matplotlib')
mpl_logger.setLevel(logging.WARNING)
import isce
import isceobj
from mroipac.baseline.Baseline import Baseline
from stripmapStack.Stack import config, run, selectPairs
from mroipac.baseline.Baseline import Baseline # 原始代码
# from components.mroipac.baseline.Baseline import Baseline
from Stack import config, run, selectPairs
filtStrength = '0.8'
@ -88,7 +90,7 @@ def createParser():
iono.add_argument('--filter_kernel_rotation', dest='filterKernelRotation', type=str, default='0.0',
help='rotation angle of the filter kernel in degrees (default = 0.0)')
parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='slc',
parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='interferogram',
help='The InSAR processing workflow : (slc, interferogram, ionosphere)')
parser.add_argument('-z', '--zero', dest='zerodop', action='store_true', default=False,
@ -308,7 +310,7 @@ def interferogramIonoStack(inps, acquisitionDates, stackReferenceDate, secondary
def main(iargs=None):
inps = cmdLineParse(iargs)
# name of the folder of the coreg SLCs including baselines, SLC, geom_reference subfolders
# name of the folder of the coreg SLCs including baselines, SLC, geom_reference subfoldersget_dates
inps.stack_folder = 'merged'
inps.dense_offsets_folder = 'dense_offsets'
@ -335,8 +337,10 @@ def main(iargs=None):
os.makedirs(runDir, exist_ok=True)
if inps.sensor and inps.sensor.lower().startswith('uavsar'): # don't try to calculate baselines for UAVSAR_STACK data
print('doBaselines is False')
pairs = selectPairs(inps,stackReferenceDate, secondaryDates, acquisitionDates,doBaselines=False)
else:
print('doBaselines is True')
pairs = selectPairs(inps,stackReferenceDate, secondaryDates, acquisitionDates,doBaselines=True)
print ('number of pairs: ', len(pairs))
@ -352,6 +356,7 @@ def main(iargs=None):
#############################
if inps.workflow == 'slc':
print("SLC")
slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=False)
elif inps.workflow == 'interferogram':

View File

@ -3,6 +3,8 @@ import os, sys
from importlib import util as importlibutil
import argparse
import configparser
from createWaterMask import main as Watermain
import head
# Deals with Configuration file
@ -48,6 +50,11 @@ class ConfigParser:
print("Running: %s"%ifunc)
print(self.funcParams[section])
func_modules = self.__import(ifunc)
# if ifunc == 'createWaterMask' and func_modules is None:
# print('createWaterMask loda failed--------------')
# Watermain(self.funcParams[section])
# else:
# func_modules.main(self.funcParams[section])
func_modules.main(self.funcParams[section])
@ -102,6 +109,7 @@ class ConfigParser:
except ImportError:
print('module {} not found'.format(name))
# Check existence of the input file
def check_if_files_exist(Files, ftype='input'):
for ifile in Files:

View File

@ -0,0 +1,99 @@
#!/usr/bin/env python3
# import isce
from isce.components.isceobj.Sensor import createSensor
import shelve
import argparse
import glob
from isce.components.isceobj.Util import Poly1D
from isce.components.isceobj.Planet.AstronomicalHandbook import Const
from isce.components.isceobj.Util.decorators import use_api
import os
def cmdLineParse():
    """Build the argument parser and return the parsed command line (sys.argv)."""
    ap = argparse.ArgumentParser(
        description='Unpack GF3 SLC data and store metadata in pickle file.')
    ap.add_argument('-i', '--input', dest='RSATdir', type=str, required=True,
                    help='Input GF3 SLC directory')
    ap.add_argument('-o', '--output', dest='slcdir', type=str, required=True,
                    help='Output unpacked SLC directory')
    return ap.parse_args()
@use_api
def unpack(RSATdir, slcname):
    '''
    Unpack GF3 data to binary SLC file. assume HH only for now

    Reads the GF3* GeoTIFF imagery and '*.meta.xml' metadata from *RSATdir*,
    writes <slcname>/<date>.slc (plus its ISCE XML header) and stores the
    frame, doppler and FM-rate polynomials in the shelve file <slcname>/data.
    '''
    ###Search for imagery and XML files in input directory
    imgnames = glob.glob(os.path.join(RSATdir,'GF3*.tiff'))
    if len(imgnames) <= 0:
        # fall back to the .tif spelling of the extension
        imgnames = glob.glob(os.path.join(RSATdir,'GF3*.tif'))
    imgname = imgnames[0]
    xmlname = glob.glob(os.path.join(RSATdir, 'GF3*.meta.xml'))[0]
    ####Create output SLC directory if needed
    if not os.path.isdir(slcname):
        os.mkdir(slcname)
    # the output directory name is the acquisition date (YYYYMMDD)
    date = os.path.basename(slcname)
    #####Create an GF3 object and wire it
    obj = createSensor('GF3_SLC')
    obj.configure()
    obj.xml = xmlname
    obj.tiff = imgname
    obj.output = os.path.join(slcname, date+'.slc')
    ####Extract the image and write the XML file for the SLC
    obj.extractImage()
    obj.frame.getImage().renderHdr()
    ####Save the doppler polynomial
    ####CEOS already provides doppler polynomial
    ####as a function of range pixel
    coeffs = obj.doppler_coeff
    poly = Poly1D.Poly1D()
    poly.initPoly(order=len(coeffs)-1)
    poly.setCoeffs(coeffs)
    ####Save the FMrate polynomial
    ####CEOS already provides FMrate polynomial
    ####as a function of range pixel
    fcoeffs = obj.azfmrate_coeff
    # fcoeffs = [0.0, 0.0, 0.0] # zero-Doppler geometry, so this is not used
    fpoly = Poly1D.Poly1D()
    fpoly.initPoly(order=len(fcoeffs)-1)
    fpoly.setCoeffs(fcoeffs)
    ####Save required metadata for further use
    ####All data is output to a shelve file
    pickName = os.path.join(slcname, 'data')
    # BUGFIX: the flag must be one of 'r'/'w'/'c'/'n'; the previous value
    # "wc" is rejected by dbm.open with a ValueError. 'c' (open for
    # read/write, creating if needed) matches the LT1AB unpacker.
    with shelve.open(pickName, "c") as db:
        db['frame'] = obj.frame
        db['doppler'] = poly
        db['fmrate'] = fpoly
if __name__ == '__main__':
    '''
    Main driver.
    '''
    inps = cmdLineParse()
    # strip a single trailing slash so os.path.basename() later yields the
    # date folder name instead of an empty string
    if inps.slcdir.endswith('/'):
        inps.slcdir = inps.slcdir[:-1]
    if inps.RSATdir.endswith('/'):
        inps.RSATdir = inps.RSATdir[:-1]
    unpack(inps.RSATdir, inps.slcdir)

View File

@ -0,0 +1,117 @@
#!/usr/bin/env python3
import isce
from isce.components.isceobj.Sensor import createSensor
import shelve
import argparse
import glob
import datetime
from isce.components.isceobj.Util import Poly1D
from isce.components.isceobj.Planet.AstronomicalHandbook import Const
from isce.components.isceobj.Util.decorators import use_api
from imageMath import IML
from isceobj.Orbit import Orbit
from isceobj.Util.Poly2D import Poly2D
from isceobj.Planet.Planet import Planet
from isceobj.Constants import SPEED_OF_LIGHT
import os
from isceobj.Planet.AstronomicalHandbook import Const
def cmdLineParse():
    """Build the argument parser and return the parsed command line (sys.argv)."""
    ap = argparse.ArgumentParser(
        description='Unpack LT1 SLC data and store metadata in pickle file.')
    ap.add_argument('-i', '--input', dest='RSATdir', type=str, required=True,
                    help='Input LT1 SLC directory')
    ap.add_argument('-o', '--output', dest='slcdir', type=str, required=True,
                    help='Output unpacked SLC directory')
    return ap.parse_args()
@use_api
def unpack(RSATdir, slcname):
    '''
    Unpack LT1 data to binary SLC file. assume HH only for now

    Reads the LT1* GeoTIFF imagery and '*.meta.xml' metadata from *RSATdir*,
    writes <slcname>/<date>.slc (plus its ISCE XML header) and stores the
    frame, doppler and FM-rate polynomials in the shelve file <slcname>/data.
    '''
    ###Search for imagery and XML files in input directory
    imgnames = glob.glob(os.path.join(RSATdir,'LT1*.tiff'))
    if len(imgnames) <= 0:
        # fall back to the .tif spelling of the extension
        imgnames = glob.glob(os.path.join(RSATdir,'LT1*.tif'))
    imgname = imgnames[0]
    xmlname = glob.glob(os.path.join(RSATdir, 'LT1*.meta.xml'))[0]
    ####Create output SLC directory if needed
    if not os.path.isdir(slcname):
        os.mkdir(slcname)
    # the output directory name is the acquisition date (YYYYMMDD)
    date = os.path.basename(slcname)
    #####Create an LT1 object and wire it
    obj = createSensor('LT1ABLT1ABREPEAT')
    obj.configure()
    obj.xml = xmlname
    obj.tiff = imgname
    obj.output = os.path.join(slcname, date+'.slc')
    ####Extract the image and write the XML file for the SLC
    obj.extractImage()
    obj.frame.getImage().renderHdr()
    ####Save the doppler polynomial
    ####CEOS already provides doppler polynomial
    ####as a function of range pixel
    coeffs = obj.doppler_coeff
    poly = Poly1D.Poly1D()
    poly.initPoly(order=len(coeffs)-1)
    poly.setCoeffs(coeffs)
    ####Save the FMrate polynomial
    ####CEOS already provides FMrate polynomial
    ####as a function of range pixel
    fcoeffs = obj.azfmrate_coeff
    # fcoeffs = [0.0, 0.0, 0.0] # zero-Doppler geometry, so this is not used
    fpoly = Poly1D.Poly1D()
    fpoly.initPoly(order=len(fcoeffs)-1)
    fpoly.setCoeffs(fcoeffs)
    ####Save required metadata for further use
    ####All data is output to a shelve file
    pickName = os.path.join(slcname, 'data')
    with shelve.open(pickName, "c") as db:
        db['frame'] = obj.frame
        db['doppler'] = poly
        db['fmrate'] = fpoly
def mainUnpackFrame(RSATdir, slcdir):
    """Programmatic entry point used by prepSlcLT1AB.

    Strips a single trailing '/' from each path (mirroring the CLI driver)
    and delegates to unpack().
    """
    slcdir = slcdir[:-1] if slcdir.endswith('/') else slcdir
    RSATdir = RSATdir[:-1] if RSATdir.endswith('/') else RSATdir
    unpack(RSATdir, slcdir)
if __name__ == '__main__':
    '''
    Main driver.
    '''
    inps = cmdLineParse()
    # strip a single trailing slash so os.path.basename() later yields the
    # date folder name instead of an empty string
    if inps.slcdir.endswith('/'):
        inps.slcdir = inps.slcdir[:-1]
    if inps.RSATdir.endswith('/'):
        inps.RSATdir = inps.RSATdir[:-1]
    unpack(inps.RSATdir, inps.slcdir)

View File

@ -29,19 +29,21 @@
import os
import sys
import time
import argparse
import shelve
import isce
import sys
import isceobj
from isceobj.Constants import SPEED_OF_LIGHT
from contrib.Snaphu.Snaphu import Snaphu
from isceobj.Constants import SPEED_OF_LIGHT
import argparse
import os
import pickle
import sys
import shelve
import glob
import shutil
#from contrib.UnwrapComp.unwrapComponents import UnwrapComponents
def createParser():
'''
Create command line parser.
@ -89,6 +91,9 @@ def extractInfoFromPickle(pckfile, inps):
from isceobj.Planet.Planet import Planet
from isceobj.Util.geo.ellipsoid import Ellipsoid
# with open(pckfile, 'rb') as f:
# frame = pickle.load(f)
with shelve.open(pckfile,flag='r') as db:
# frame = db['swath']
burst = db['frame']
@ -129,12 +134,12 @@ def extractInfoFromPickle(pckfile, inps):
return data
def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = None, defomax = None, initOnly = None):
print("costMode is ", costMode)
if costMode is None:
costMode = 'DEFO'
if initMethod is None:
initMethod = 'MST'
initMethod = 'MCF'
if defomax is None:
defomax = 4.0
@ -208,6 +213,10 @@ def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = Non
def runUnwrapMcf(infile, outfile, corfile, config, defomax=2):
runUnwrap(infile, outfile, corfile, config, costMode = 'DEFO',initMethod = 'MCF', defomax = defomax, initOnly = True)
return
def runUnwrapMcf_smooth(infile, outfile, corfile, config, defomax=2):
runUnwrap(infile, outfile, corfile, config, costMode = 'SMOOTH',initMethod = 'MCF', defomax = defomax, initOnly = True)
return
@ -223,7 +232,7 @@ def runUnwrapIcu(infile, outfile):
img.load(infile + '.xml')
width = img.getWidth()
width = img.getWidth()
#intImage
intImage = isceobj.createIntImage()
@ -289,7 +298,6 @@ def main(iargs=None):
'''
The main driver.
'''
start_time = time.time()
inps = cmdLineParse(iargs)
print(inps.method)
@ -306,8 +314,11 @@ def main(iargs=None):
os.makedirs(referenceShelveDir, exist_ok=True)
inps.reference = os.path.dirname(inps.reference)
cpCmd='cp ' + os.path.join(inps.reference, 'data*') +' '+referenceShelveDir
os.system(cpCmd)
sec_files = glob.glob(os.path.join(inps.reference, 'data*'))
for file in sec_files:
shutil.copy(file, referenceShelveDir)
#cpCmd='cp ' + os.path.join(inps.reference, 'data*') +' '+referenceShelveDir
#os.system(cpCmd)
pckfile = os.path.join(referenceShelveDir,'data')
print(pckfile)
metadata = extractInfoFromPickle(pckfile, inps)
@ -316,7 +327,7 @@ def main(iargs=None):
print ('unwrapping method : ' , inps.method)
if inps.method == 'snaphu':
if inps.nomcf:
fncall = runUnwrap
fncall = runUnwrapMcf_smooth
else:
fncall = runUnwrapMcf
fncall(inps.intfile, inps.unwprefix + '_snaphu.unw', inps.cohfile, metadata, defomax=inps.defomax)
@ -336,10 +347,6 @@ def main(iargs=None):
elif inps.method == 'icu':
runUnwrapIcu(inps.intfile, inps.unwprefix + '_icu.unw')
# time usage
m, s = divmod(time.time() - start_time, 60)
print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
if __name__ == '__main__':

0
off.log Normal file
View File

0
sim.log Normal file
View File

BIN
test_mask.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 99 KiB