Add code adaptation for the HJ2 satellite

LT1AB
ChenZenghui 2025-02-05 09:37:37 +08:00
parent a8137a23f6
commit 936b0ee9ac
25 changed files with 191 additions and 205 deletions

.vscode/launch.json vendored

@@ -96,7 +96,7 @@
 "name": "Python: run 08 20230404",
 "type": "python",
 "request": "launch",
-"program": "/mnt/d/codestorage/isce2/isce2/contrib/stack/stripmapStack/stripmapWrapper.py",
+"program": "/mnt/d/estar-proj/microproduct_depdence/ISCE_INSAR/contrib/stack/stripmapStack/stripmapWrapper.py",
 "console": "integratedTerminal",
 "justMyCode": false,
 "args": [


@@ -463,7 +463,7 @@ class Orbit(Component):
 "Orbit interpolation type %s, is not implemented" % method
 )
 return newSV
-elif self.sessionMode=="LT1B" or self.setsessionMode=="LT1A":
+elif self.sessionMode=="LT1B" or self.sessionMode=="LT1A":
 return self.getSatelliteSpaceState(time)
 ## Isn't orbit redundant? -compute the method based on name
 def interpolate(self, time, method='linear'):

@@ -1135,7 +1135,7 @@ class Orbit(Component):
 dr2 = pos2-xyz
 rng = np.linalg.norm(dr2) # 斜距
 FdTheory2= -2/(rng*wvl)*np.dot(dr2,vel2)
 TSR= rng * 2 / LIGHTSPEED - self.refrenceTime # nx1
 FdNumerical=0
 # FdNumerical=FdNumerical+self.dopperPoly[0]*TSR**0

@@ -1144,7 +1144,7 @@ class Orbit(Component):
 # FdNumerical=FdNumerical+self.dopperPoly[3]*TSR**3
 fdopper_grad=(FdTheory1 - FdTheory2)/dt
 inc_t = (FdTheory2-FdNumerical) /fdopper_grad
 # print(inc_t,rng,FdNumerical,FdTheory2,tguess,pos2)
 tguess = tguess - datetime.timedelta(seconds = inc_t)

@@ -1183,11 +1183,11 @@ class Orbit(Component):
 fdop = doppler(DTU.seconds_since_midnight(tguess),rng)* wvl * 0.5
 # print("doppler", doppler(DTU.seconds_since_midnight(tguess),rng))
 # print("wvl", wvl)
-# print("fdop", fdop)
+print("fdop", fdop)
 fdopder = (0.5*wvl*doppler(DTU.seconds_since_midnight(tguess),rng+10.0) - fdop) / 10.0
 # print("doppler2", doppler(DTU.seconds_since_midnight(tguess),rng+10.0))
-# print("fdopder", fdopder)
+print("fdopder", fdopder)
 fn = dopfact - fdop * rng
 c1 = -np.dot(vel, vel)
 c2 = (fdop/rng + fdopder)

@@ -1200,7 +1200,7 @@ class Orbit(Component):
 tguess = tguess - datetime.timedelta(seconds = fn/fnprime)
 # print("输出的tguess", tguess)
 # print(outOfBounds)
-# print("end ------------------------------------------------------------\n")
+print("end ------------------------------------------------------------\n")
 if outOfBounds:
 raise Exception('Interpolation time out of bounds')
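The prints toggled in the hunks above sit inside the Newton-style azimuth-time search that this Orbit class uses to match the modelled Doppler. A minimal, self-contained sketch of that iteration (orbit_state and doppler_fn are hypothetical stand-ins for the class internals, not the library API):

    import datetime
    import numpy as np

    def zero_doppler_time(orbit_state, doppler_fn, xyz, tguess, wvl, niter=50, tol=1e-8):
        # orbit_state(t) -> (position, velocity) 3-vectors; doppler_fn(t, rng) -> Doppler in Hz.
        for _ in range(niter):
            pos, vel = orbit_state(tguess)
            dr = pos - np.asarray(xyz)
            rng = np.linalg.norm(dr)                    # slant range
            dopfact = np.dot(dr, vel)
            fdop = doppler_fn(tguess, rng) * wvl * 0.5
            fdopder = (0.5 * wvl * doppler_fn(tguess, rng + 10.0) - fdop) / 10.0
            fn = dopfact - fdop * rng                   # residual to drive to zero
            fnprime = -np.dot(vel, vel) + (fdop / rng + fdopder) * dopfact
            step = fn / fnprime
            tguess = tguess - datetime.timedelta(seconds=step)
            if abs(step) < tol:
                break
        return tguess, rng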


@@ -1308,7 +1308,7 @@ class _prodInfo(GF3_SLCNamespace):
 elif z.tag == 'WidthInMeters':
 self.WidthInMeters = float(z.text)
 if z.tag == 'productLevel':
-self.productLevel = int(z.text)
+self.productLevel = 1 # 设置默认值
 elif z.tag == 'productType':
 self.productType = z.text
 elif z.tag == 'productFormat':


@@ -659,7 +659,7 @@ class LT1ABLT1ABREPEAT(Sensor):
 '''
 # Initialize orbit objects
-# Read into temp orbit first.
+# Read into temp orbit first.frame
 # Radarsat 2 needs orbit extensions.
 print("构建新的轨道程序代码:")

@@ -672,6 +672,8 @@ class LT1ABLT1ABREPEAT(Sensor):
 As_arr=tempOrbit.A_arr.tolist()
 """ 构建轨道 """
 self.frame.getOrbit().setsessionMode(self.product.mission)
+print("卫星型号")
+print(self.frame.orbit.sessionMode)
 self.frame.getOrbit().setPolyParams(tempOrbit.polynum,tempOrbit.starttime,As_arr)
 self.frame.getOrbit().setDoppler(self.product.dopplerRateValues.dopplerRateReferenceTime,
 self.product.dopplerRateValues.dopplerRateValuesCoefficients)


@@ -100,6 +100,7 @@ createICEYE_SLC = partial(factory_template, 'ICEYE_SLC')
 createUAVSAR_Hdf5_SLC = partial(factory_template, 'UAVSAR_HDF5_SLC')
 createSAOCOM_SLC = partial(factory_template, 'SAOCOM_SLC')
 createGF3_SLC = partial(factory_template, 'GF3_SLC')
+createHJ2_SLC = partial(factory_template, 'HJ2_SLC')
 createLT1ABRepeat = partial(factory_template, 'LT1ABLT1ABREPEAT')
 SENSORS = {'ALOS' : createALOS,

@@ -129,6 +130,7 @@ SENSORS = {'ALOS' : createALOS,
 'UAVSAR_HDF5_SLC' : createUAVSAR_Hdf5_SLC,
 'SAOCOM_SLC': createSAOCOM_SLC,
 'GF3_SLC' : createGF3_SLC,
+'HJ2_SLC' : createHJ2_SLC,
 'LT1ABLT1ABREPEAT' : createLT1ABRepeat}
 #These are experimental and can be added in as they become ready
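The two hunks above register the HJ2 reader through the same name-to-factory table used by every other sensor. A hedged, standalone sketch of the lookup pattern (factory_template here is a stub, not the real ISCE implementation):

    from functools import partial

    def factory_template(sensor_name, name=None):
        # Stub: the real factory imports the sensor module and returns a configured reader.
        return {'sensor': sensor_name, 'name': name}

    createGF3_SLC = partial(factory_template, 'GF3_SLC')
    createHJ2_SLC = partial(factory_template, 'HJ2_SLC')   # entry added by this commit

    SENSORS = {'GF3_SLC': createGF3_SLC,
               'HJ2_SLC': createHJ2_SLC}

    def createSensor(key, name=None):
        # Unknown sensor names fail loudly instead of silently falling through.
        try:
            return SENSORS[key.upper()](name=name)
        except KeyError:
            raise ValueError('Unknown sensor: {}'.format(key))

    print(createSensor('HJ2_SLC', name='reference'))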


@@ -423,7 +423,7 @@ class DenseAmpcor(Component):
 firstind = flat_indices[istart:iend,:].ravel()[0]
 lastind = flat_indices[istart:iend,:].ravel()[-1]
-print(ofmt % (thrd, firstind, lastind, startAc, endAc, startDown, endDown))
+# print(ofmt % (thrd, firstind, lastind, startAc, endAc, startDown, endDown))
 # Launch job
 args = (startAc,endAc,startDown,endDown,self.numLocationAcross,


@@ -33,6 +33,8 @@ from iscesys.Component.Component import Component,Port
 from iscesys.Compatibility import Compatibility
 from stdproc.stdproc.crossmul import crossmul
 import numpy as np
+import isceobj
+from iscesys.ImageUtil.ImageUtil import ImageUtil as IU
 class Crossmul(Component):

@@ -145,9 +147,7 @@ class Crossmul(Component):
 if __name__ == '__main__':
-import isceobj
-from iscesys.ImageUtil.ImageUtil import ImageUtil as IU
-import numpy as np
 def load_pickle(step='correct'):
 import cPickle


@@ -47,7 +47,7 @@ from contrib.demUtils.DemStitcher import DemStitcher
 from isceobj.Image import createImage
 #Parameters definitions
 URL = Component.Parameter('_url',
-public_name = 'URL',default = 'https://e4ftl01.cr.usgs.gov/SRTM/SRTMSWBD.003/2000.02.11',
+public_name = 'URL',default = 'http://e4ftl01.cr.usgs.gov/SRTM/SRTMSWBD.003/2000.02.11',
 type = str,
 mandatory = False,
 doc = "Url for the high resolution water body mask")

@@ -103,6 +103,29 @@ class SWBDStitcher(DemStitcher):
 )
 return swbdName
+@staticmethod
+def toRadarNone(latin,lonin,output):
+'''保存雷达的水掩膜版'''
+latim = createImage()
+latim.load(latin + '.xml')
+lonim = createImage()
+lonim.load(lonin + '.xml')
+lat = np.fromfile(latin,latim.toNumpyDataType())
+lon = np.fromfile(lonin,lonim.toNumpyDataType())
+mask = np.reshape(lat*0+1,(latim.coord2.coordSize,latim.coord1.coordSize))
+#remember mask starts from top left corner
+#deltaLat < 0
+print("-"*20)
+print("Range:",(latim.coord2.coordSize,latim.coord1.coordSize))
+print("-"*20)
+cropped = mask.reshape((latim.coord2.coordSize,latim.coord1.coordSize)).astype(np.uint8)
+cropped.tofile(output)
+croppedim = createImage()
+croppedim.initImage(output,'read',cropped.shape[1],'BYTE')
+croppedim.renderHdr()
+pass
 @staticmethod
 def toRadar(maskin,latin,lonin,output):
 maskim = createImage()

@@ -121,8 +144,8 @@ class SWBDStitcher(DemStitcher):
 deltaLon = maskim.coord1.coordDelta
 #remember mask starts from top left corner
 #deltaLat < 0
-lati = np.clip(((lat - startLat)/deltaLat).astype(int), 0, mask.shape[0]-1)
-loni = np.clip(((lon - startLon)/deltaLon).astype(int), 0, mask.shape[1]-1)
+lati = np.clip(((lat - startLat)/deltaLat).astype(np.int), 0, mask.shape[0]-1)
+loni = np.clip(((lon - startLon)/deltaLon).astype(np.int), 0, mask.shape[1]-1)
 cropped = (mask[lati,loni] + 1).astype(maskim.toNumpyDataType())
 cropped = np.reshape(cropped,(latim.coord2.coordSize,latim.coord1.coordSize))
 cropped.tofile(output)
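A brief usage sketch of the new toRadarNone helper (file names are hypothetical): given lat/lon geometry rasters with ISCE .xml headers, it writes an all-ones BYTE mask of the same dimensions plus its header, i.e. a "no water body" placeholder where no SWBD tile is available.

    # lat.rdr / lon.rdr (+ .xml) are assumed radar-coordinate geometry products
    SWBDStitcher.toRadarNone('lat.rdr', 'lon.rdr', 'waterMask.msk')
    # waterMask.msk / waterMask.msk.xml can then stand in for the SWBD-derived mask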


@@ -33,11 +33,12 @@ import isce
 import isceobj
 import argparse
 import os
+from mroipac.filter.Filter import Filter
+from mroipac.icu.Icu import Icu
 logger = logging.getLogger('isce.tops.runFilter')
 def runFilter(infile, outfile, filterStrength):
-from mroipac.filter.Filter import Filter
 logger.info("Applying power-spectral filter")
 # Initialize the flattened interferogram

@@ -64,7 +65,7 @@ def runFilter(infile, outfile, filterStrength):
 def estCoherence(outfile, corfile):
-from mroipac.icu.Icu import Icu
 logger.info("Estimating spatial coherence based phase sigma")
 #Create phase sigma correlation file here


@@ -12,7 +12,6 @@ from osgeo import gdal
 from osgeo import osr
 import numpy as np
 from PIL import Image
-import cv2
 import logging
 import math

@@ -552,67 +551,6 @@ class ImageHandler:
 im_arr_mask = (im_arr < threshold).astype(int)
 self.write_img(out_tif_path, im_proj, im_geotrans, im_arr_mask)
-def write_quick_view(self, tif_path, color_img=False, quick_view_path=None):
-"""
-生成快视图,默认快视图和影像同路径且同名
-:param tif_path:影像路径
-:param color_img:是否生成随机伪彩色图
-:param quick_view_path:快视图路径
-"""
-if quick_view_path is None:
-quick_view_path = os.path.splitext(tif_path)[0]+'.jpg'
-n = self.get_bands(tif_path)
-if n == 1: # 单波段
-t_data = self.get_data(tif_path)
-else: # 多波段,转为强度数据
-t_data = self.get_data(tif_path)
-t_data = t_data.astype(float)
-t_data = np.sqrt(t_data[0] ** 2 + t_data[1] ** 2)
-t_r = self.get_img_height(tif_path)
-t_c = self.get_img_width(tif_path)
-if t_r > 10000 or t_c > 10000:
-q_r = int(t_r / 10)
-q_c = int(t_c / 10)
-elif 1024 < t_r < 10000 or 1024 < t_c < 10000:
-if t_r > t_c:
-q_r = 1024
-q_c = int(t_c/t_r * 1024)
-else:
-q_c = 1024
-q_r = int(t_r/t_c * 1024)
-else:
-q_r = t_r
-q_c = t_c
-if color_img is True:
-# 生成伪彩色图
-img = np.zeros((t_r, t_c, 3), dtype=np.uint8) # (高,宽,维度)
-u = np.unique(t_data)
-for i in u:
-if i != 0:
-w = np.where(t_data == i)
-img[w[0], w[1], 0] = np.random.randint(0, 255) # 随机生成一个0到255之间的整数 可以通过挑参数设定不同的颜色范围
-img[w[0], w[1], 1] = np.random.randint(0, 255)
-img[w[0], w[1], 2] = np.random.randint(0, 255)
-img = cv2.resize(img, (q_c, q_r)) # (宽,高)
-cv2.imwrite(quick_view_path, img)
-# cv2.imshow("result4", img)
-# cv2.waitKey(0)
-else:
-# 灰度图
-min = np.percentile(t_data, 2) # np.nanmin(t_data)
-max = np.percentile(t_data, 98) # np.nanmax(t_data)
-t_data[np.isnan(t_data)] = max
-if (max - min) < 256:
-t_data = (t_data - min) / (max - min) * 255
-out_img = Image.fromarray(t_data)
-out_img = out_img.resize((q_c, q_r)) # 重采样
-out_img = out_img.convert("L") # 转换成灰度图
-out_img.save(quick_view_path)
 def limit_field(self, out_path, in_path, min_value, max_value):
 """
 :param out_path:输出路径


@@ -8,7 +8,7 @@ import os
 import argparse
 import numpy as np
 from scipy import ndimage
-import matplotlib.pyplot as plt
+# import matplotlib.pyplot as plt
 from osgeo import gdal
 from osgeo.gdalconst import GA_ReadOnly

@@ -73,18 +73,18 @@ def createParser():
 parser.add_argument('-o', '--output_directory', dest='outDir', type=str, default='./',
 help='Output directory (default: %(default)s).')
-# plot
-plot = parser.add_argument_group('plot')
-plot.add_argument('-p', '--plot', dest='plot', action='store_true', default=False,
-help='plot the offsets before and after masking and filtering')
-plot.add_argument('-v', dest='vlim', nargs=2, type=float, default=(-0.05, 0.05),
-help='display range for offset (default: %(default)s).')
-plot.add_argument('--v-snr', dest='vlim_snr', nargs=2, type=float, default=(0, 100),
-help='display range for offset SNR (default: %(default)s).')
-plot.add_argument('--figsize', dest='figsize', nargs=2, type=float, default=(18, 5),
-help='figure size in inch (default: %(default)s).')
-plot.add_argument('--save', dest='fig_name', type=str, default=None,
-help='save figure as file')
+# # plot
+# plot = parser.add_argument_group('plot')
+# plot.add_argument('-p', '--plot', dest='plot', action='store_true', default=False,
+# help='plot the offsets before and after masking and filtering')
+# plot.add_argument('-v', dest='vlim', nargs=2, type=float, default=(-0.05, 0.05),
+# help='display range for offset (default: %(default)s).')
+# plot.add_argument('--v-snr', dest='vlim_snr', nargs=2, type=float, default=(0, 100),
+# help='display range for offset SNR (default: %(default)s).')
+# plot.add_argument('--figsize', dest='figsize', nargs=2, type=float, default=(18, 5),
+# help='figure size in inch (default: %(default)s).')
+# plot.add_argument('--save', dest='fig_name', type=str, default=None,
+# help='save figure as file')
 return parser

@@ -227,7 +227,6 @@ def mask_filter(inps, band, outName):
 def plot_mask_and_filtering(az_list, rg_list, inps=None):
-print('-'*30)
 print('plotting mask and filtering result ...')
 print('mask pixels with SNR == 0 (for plotting ONLY; data files are untouched)')
 snr = az_list[0]

@@ -239,7 +238,7 @@ def plot_mask_and_filtering(az_list, rg_list, inps=None):
 perc = np.sum(snr >= inps.snrThreshold) / np.sum(snr > 0)
 print('percentage of pixels with SNR >= {} among pixels with SNR > 0: {:.0%}'.format(inps.snrThreshold, perc))
-fig, axs = plt.subplots(nrows=2, ncols=5, figsize=inps.figsize, sharex=True, sharey=True)
+# fig, axs = plt.subplots(nrows=2, ncols=5, figsize=inps.figsize, sharex=True, sharey=True)
 titles = ['SNR',
 'offset',
 'offset (mask {} - {:.0%} remain)'.format(inps.snrThreshold, perc),

@@ -248,14 +247,14 @@ def plot_mask_and_filtering(az_list, rg_list, inps=None):
 # plot SNR
 kwargs = dict(vmin=inps.vlim_snr[0], vmax=inps.vlim_snr[1], cmap='RdBu', interpolation='nearest')
-im0 = axs[0,0].imshow(snr, **kwargs)
-im0 = axs[1,0].imshow(snr, **kwargs)
-axs[0,0].set_title('SNR', fontsize=12)
-print('SNR data range: [{}, {}]'.format(np.nanmin(snr), np.nanmax(snr)))
-# label
-axs[0,0].set_ylabel('azimuth', fontsize=12)
-axs[1,0].set_ylabel('range', fontsize=12)
+# im0 = axs[0,0].imshow(snr, **kwargs)
+# im0 = axs[1,0].imshow(snr, **kwargs)
+# axs[0,0].set_title('SNR', fontsize=12)
+# print('SNR data range: [{}, {}]'.format(np.nanmin(snr), np.nanmax(snr)))
+# # label
+# axs[0,0].set_ylabel('azimuth', fontsize=12)
+# axs[1,0].set_ylabel('range', fontsize=12)
 # plot offset
 kwargs = dict(vmin=inps.vlim[0], vmax=inps.vlim[1], cmap='jet', interpolation='nearest')

@@ -305,10 +304,11 @@ def main(iargs=None):
 inps.outRange = os.path.join(inps.outDir, inps.outRange)
 rg_list = mask_filter(inps, band=[2], outName=inps.outRange)
 # plot result
-if inps.plot:
-plot_mask_and_filtering(az_list, rg_list, inps)
-return
+# if inps.plot:
+# plot_mask_and_filtering(az_list, rg_list, inps)
+# return
 if __name__ == '__main__':


@@ -697,21 +697,29 @@ def baselinePair(baselineDir, reference, secondary,doBaselines=True):
 if doBaselines: # open files to calculate baselines
 try:
-mdb = shelve.open( os.path.join(reference, 'raw'), flag='r')
-sdb = shelve.open( os.path.join(secondary, 'raw'), flag='r')
+with shelve.open(os.path.join(reference, 'raw'), flag='r') as mdb:
+mFrame = mdb['frame']
+with shelve.open(os.path.join(secondary, 'raw'), flag='r') as sdb:
+sFrame = sdb['frame']
+#mdb = shelve.open( os.path.join(reference, 'raw'), flag='r')
+#sdb = shelve.open( os.path.join(secondary, 'raw'), flag='r')
 except:
-mdb = shelve.open( os.path.join(reference, 'data'), flag='r')
-sdb = shelve.open( os.path.join(secondary, 'data'), flag='r')
-mFrame = mdb['frame']
-sFrame = sdb['frame']
+with shelve.open(os.path.join(reference, 'data'), flag='r') as mdb:
+mFrame = mdb['frame']
+with shelve.open(os.path.join(secondary, 'data'), flag='r') as sdb:
+sFrame = sdb['frame']
+#mdb = shelve.open( os.path.join(reference, 'data'), flag='r')
+#sdb = shelve.open( os.path.join(secondary, 'data'), flag='r')
+#mFrame = mdb['frame']
+#sFrame = sdb['frame']
 bObj = Baseline()
 bObj.configure()
-# bObj.wireInputPort(name='referenceFrame', object=mFrame) # 原始代码
-# bObj.wireInputPort(name='secondaryFrame', object=sFrame)
-bObj.addReferenceFrame_new(mFrame)
-bObj.addSecondaryFrame_new(sFrame)
+bObj.wireInputPort(name='referenceFrame', object=mFrame) # 原始代码
+bObj.wireInputPort(name='secondaryFrame', object=sFrame)
+# bObj.addReferenceFrame_new(mFrame)
+# bObj.addSecondaryFrame_new(sFrame)
 bObj.baseline() # calculate baseline from orbits
 pBaselineBottom = bObj.pBaselineBottom
 pBaselineTop = bObj.pBaselineTop
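The try/except above first looks for a 'raw' shelf and falls back to 'data'. The same fallback can be written as a small helper (hypothetical, shown only to illustrate the with-block usage adopted here, which closes the shelf even on error):

    import os
    import shelve

    def load_frame(dirname):
        # Prefer the 'raw' shelf, fall back to 'data'.
        for key in ('raw', 'data'):
            try:
                with shelve.open(os.path.join(dirname, key), flag='r') as db:
                    return db['frame']
            except Exception:
                continue
        raise IOError('no frame shelf found under ' + dirname)

    # mFrame = load_frame(reference); sFrame = load_frame(secondary)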


@@ -78,9 +78,14 @@ def main(iargs=None):
 # check if the reference and secondary shelf are the same, i.e. it is baseline grid for the reference
 reference_SensingStart = reference.getSensingStart()
 secondary_SensingStart = secondary.getSensingStart()
 if reference_SensingStart==secondary_SensingStart:
+print(reference_SensingStart)
+print(secondary_SensingStart)
 referenceBaseline = True
 else:
+print(reference_SensingStart)
+print(secondary_SensingStart)
 referenceBaseline = False
 refElp = Planet(pname='Earth').ellipsoid

@@ -93,6 +98,10 @@ def main(iargs=None):
 mSensingStart = reference.sensingStart # min([x.sensingStart for x in referenceswaths])
 mSensingStop = reference.sensingStop #max([x.sensingStop for x in referenceswaths])
 mOrb = getMergedOrbit(reference)
+nRows = reference._numberOfLines
+nCols = reference._numberOfSamples
+mStartRange = reference._startingRange
 nPixels = int(np.round( (mFarRange - mStartingRange)/dr)) + 1
 nLines = int(np.round( (mSensingStop - mSensingStart).total_seconds() / dt)) + 1

@@ -103,36 +112,31 @@ def main(iargs=None):
 # To make sure that we have at least 30 points
 nRange = int(np.max([30, int(np.ceil(rangeLimits/7000.))]))
 slantRange = mStartingRange + np.arange(nRange) * rangeLimits / (nRange - 1.0)
 azimuthLimits = (mSensingStop - mSensingStart).total_seconds()
 nAzimuth = int(np.max([30,int(np.ceil(azimuthLimits))]))
-# print("mFarRange", mFarRange)
-# print("mStartingRange", mStartingRange)
-# print("mSensingStart", mSensingStart)
-# print("mSensingStop", mSensingStop)
-# print("nPixels", nPixels)
-# print("nLines", nLines)
-# print("dr", dr)
-# print("rangeLimits", rangeLimits)
-# print("nRange", nRange)
-# print("slantRange", slantRange)
-# print("azimuthLimits", azimuthLimits)
-# print("nAzimuth", nAzimuth)
 azimuthTime = [mSensingStart + datetime.timedelta(seconds= x * azimuthLimits/(nAzimuth-1.0)) for x in range(nAzimuth)]
+slantRange_save = [mStartingRange + c * rangeLimits/(nCols - 1.0) for c in range(nCols)]
+azimuthLimits = (mSensingStop - mSensingStart).total_seconds()
+azimuthTime_save = [mSensingStart + datetime.timedelta(seconds= r * azimuthLimits/(nRows-1.0)) for r in range(nRows)]
 # print("azimuthTime", azimuthTime)
 doppler = Poly2D()
 doppler.initPoly(azimuthOrder=0, rangeOrder=0, coeffs=[[0.]])
 Bperp = np.zeros((nAzimuth,nRange), dtype=np.float32)
 Bpar = np.zeros((nAzimuth,nRange), dtype=np.float32)
+# Bperp = np.zeros((nRows,nCols), dtype=np.float32)
+# Bpar = np.zeros((nRows,nCols), dtype=np.float32)
 fid = open(inps.baselineFile, 'wb')
 print('Baseline file {0} dims: {1}L x {2}P'.format(inps.baselineFile, nAzimuth, nRange))
+# print('Baseline file {0} dims: {1}L x {2}P'.format(inps.baselineFile, nRows, nCols))
 if referenceBaseline:
 Bperp = np.zeros((nAzimuth,nRange), dtype=np.float32)
+# Bperp = np.zeros((nRows,nCols), dtype=np.float32)
 Bperp.tofile(fid)
 else:
 for ii, taz in enumerate(azimuthTime):

@@ -143,9 +147,10 @@ def main(iargs=None):
 mvel = np.array(referenceSV.getVelocity())
 for jj, rng in enumerate(slantRange):
+try:
 target = mOrb.rdr2geo(taz, rng)
+except Exception as e:
+print(e)
 targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist())
 slvTime, slvrng = sOrb.geo2rdr(target, doppler=doppler, wvl=0)

@@ -161,7 +166,11 @@ def main(iargs=None):
 perp = aa * np.sqrt(1 - costheta*costheta)
 direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel))
 Bperp[ii,jj] = direction*perp
+print('保存卫星基线\n')
+np.save(inps.baselineFile + '_Bpar.npy', Bpar)
+np.save(inps.baselineFile + '_Bperp.npy', Bperp)
+np.save(inps.baselineFile + '_Range.npy', slantRange_save)
+np.save(inps.baselineFile + '_Time.npy', azimuthTime_save)
 Bperp.tofile(fid)
 fid.close()
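The new np.save calls write NumPy side products next to the flat-binary baseline file. A hedged sketch of reading them back (the base name is a placeholder for inps.baselineFile):

    import numpy as np

    base = 'baseline.rdr'                       # placeholder for inps.baselineFile
    Bpar = np.load(base + '_Bpar.npy')
    Bperp = np.load(base + '_Bperp.npy')
    slant_range = np.load(base + '_Range.npy')
    # azimuthTime_save holds datetime objects, so the saved array is dtype=object:
    azimuth_time = np.load(base + '_Time.npy', allow_pickle=True)
    print(Bperp.shape, slant_range.shape, azimuth_time.shape)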


@@ -50,7 +50,7 @@ def cmdLineParse(iargs = None):
 return inps
 #####Helper functions for geobox manipulation
-def geoboxToAzrgbox(frame, geobox,demPath, israw=False, isnative=False, margin=0.02):
+def geoboxToAzrgbox(frame, geobox,demPath, israw=False, isnative=False, margin=0.00):
 '''
 Convert a geo bounding box - SNWE to pixel limits.
 '''

@@ -91,7 +91,8 @@ def geoboxToAzrgbox(frame, geobox,demPath, israw=False, isnative=False, margin=0
 session=frame.getProcessingSystem() # 数据
-####Do
+##Do
+print('使用dem数据进行配准')
 for combo in combos:
 try:
 z=ImageHandler.get_pixel_value(demPath,combo[1],combo[0])[0]

@@ -102,7 +103,18 @@ def geoboxToAzrgbox(frame, geobox,demPath, israw=False, isnative=False, margin=0
 rgs.append(rgm)
 except Exception as e:
 pass
+# print(rgs)
+# for z in zrange:
+# for combo in combos:
+# try:
+# llh = combo + [z]
+# taz, rgm = frame.orbit.geo2rdr(combo + [z], side=lookSide,
+# doppler=doppler, wvl=wvl)
+# azs.append(taz)
+# rgs.append(rgm)
+# except Exception as e:
+# pass
 # print("="*20)

@@ -119,17 +131,17 @@ def geoboxToAzrgbox(frame, geobox,demPath, israw=False, isnative=False, margin=0
 temp2 = frame.sensingStart
 temp = (azrgbox[0] - frame.sensingStart).total_seconds()
 ymin = np.floor( (azrgbox[0] - frame.sensingStart).total_seconds() * frame.PRF)
-print('Line start: ', ymin)
+# print('Line start: ', ymin)
 ymin = np.int32( np.clip(ymin, 0, frame.numberOfLines-1))
 ####sensing stop
 ymax = np.ceil( (azrgbox[1] - frame.sensingStart).total_seconds() * frame.PRF) + 1
-print('Line stop: ', ymax)
+# print('Line stop: ', ymax)
 ymax = np.int32( np.clip(ymax, 1, frame.numberOfLines))
-print('Line limits: ', ymin, ymax)
-print('Original Line Limits: ', 0, frame.numberOfLines)
+# print('Line limits: ', ymin, ymax)
+# print('Original Line Limits: ', 0, frame.numberOfLines)
 if israw:

@@ -182,7 +194,6 @@ def cropFrame(frame, limits, outdir, israw=False):
 outframe = copy.deepcopy(frame)
 if israw:
 factor = 2
 else:

@@ -196,7 +207,6 @@ def cropFrame(frame, limits, outdir, israw=False):
 print('Line start: ', ymin)
 ymin = np.int32( np.clip(ymin, 0, frame.numberOfLines-1))
 ####sensing stop
 ymax = np.ceil( (limits[1] - frame.sensingStart).total_seconds() * frame.PRF) + 1
 print('Line stop: ', ymax)

@@ -214,8 +224,11 @@ def cropFrame(frame, limits, outdir, israw=False):
 outframe.sensingStop = frame.sensingStop + datetime.timedelta(seconds = (ymax-1)/frame.PRF)
 outframe.sensingMid = outframe.sensingStart + 0.5 * (outframe.sensingStop - outframe.sensingStart)
 ####starting range
+# print('计算结果')
+# print(limits)
+# print(frame.startingRange)
+# print(frame.instrument.rangePixelSize)
 xmin = np.floor( (limits[2] - frame.startingRange)/frame.instrument.rangePixelSize)
 print('Pixel start: ', xmin)
 xmin = np.int32(np.clip(xmin, 0, (frame.image.width//factor)-1))

@@ -259,9 +272,20 @@ def cropFrame(frame, limits, outdir, israw=False):
 outdirname = os.path.dirname(outname)
 os.makedirs(outdirname, exist_ok=True)
+txt_path = os.path.join(outdir, os.path.splitext(inname)[0] + '.txt')
+print(txt_path)
+with open(txt_path, 'w') as f:
+f.write("{}\n".format(ymin))
+f.write("{}\n".format(ymax))
+f.write("{}\n".format(xmin))
+f.write("{}".format(xmax))
 indata = IML.mmapFromISCE(inname, logging)
 indata.bands[0][ymin:ymax,xmin*factor:xmax*factor].tofile(outname)
+print(inname)
 indata = None
 outframe.image.filename = outname
 outframe.image.width = outframe.numberOfSamples
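The new side file records the crop window as four lines (ymin, ymax, xmin, xmax). A minimal reader for it (the helper name is ours, not part of the stack):

    def read_crop_limits(txt_path):
        # Returns (ymin, ymax, xmin, xmax) as integers from the file written above.
        with open(txt_path) as f:
            ymin, ymax, xmin, xmax = (int(float(v)) for v in f.read().split())
        return ymin, ymax, xmin, xmax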


@@ -114,8 +114,10 @@ def estimateOffsetField(reference, secondary, inps=None):
 objOffset.denseampcor(sar, sim)
 sar.finalizeImage()
 sim.finalizeImage()
 return objOffset

@@ -132,7 +134,7 @@ def main(iargs=None):
 objOffset = estimateOffsetField(inps.reference, inps.secondary, inps)
-print('Top left corner of offset image: ', objOffset.locationDown[0][0],objOffset.locationAcross[0][0])
+# print('Top left corner of offset image: ', objOffset.locationDown[0][0],objOffset.locationAcross[0][0])
 if __name__ == '__main__':


@@ -8,7 +8,6 @@ import os, glob , sys
 import symtable
 import shelve
 import numpy
-import numpy.matlib
 import statistics
 #以下是当前目录的py文件

@@ -29,4 +28,7 @@ import Stack
 import topo
 import unwrap
+import MaskAndFilter
+import resampleOffsets


@@ -49,7 +49,6 @@ def date_list(pairDirs):
 dates1 = os.path.basename(di).split('_')
 if not dates[0] in dateList: dateList.append(dates[0])
 if not dates[1] in dateList: dateList.append(dates[1])
 dateList.sort()
 d1 = datetime.datetime(*time.strptime(dateList[0],"%Y%m%d")[0:5])
 for ni in range(len(dateList)):


@@ -179,7 +179,7 @@ def main(iargs=None):
 acquisitionDate = os.path.basename(dateDir)
 slcDir = os.path.join(outputDir, acquisitionDate)
 os.makedirs(slcDir, exist_ok=True)
-cmd = 'unpackFrame_GF3.exe -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
+cmd = 'unpackFrame_GF3.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
 result = os.system(cmd)
 # f.write(inps.text_cmd + cmd+'\n')
 print(cmd, result)


@@ -6,7 +6,6 @@ import argparse
 from uncompressFile import uncompressfile
 import shutil
 import xml.etree.ElementTree as etree
-import unpackFrame_LT1AB
 def createParser():

@@ -21,7 +20,7 @@ def createParser():
 help='Optional: remove zip/tar/compressed files after unpacking into date structure (default is to keep in archive folder)')
 parser.add_argument('-o', '--output', dest='output', type=str, required=False,
 help='output directory where data needs to be unpacked into isce format (for script generation).')
-parser.add_argument('--linux',dest="linux", action='store_true', default=True, help='run in linux')
+# parser.add_argument('--linux',dest="linux", action='store_true', default=True, help='run in linux')
 parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, required=False, default='source ~/.bash_profile;',
 help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;')
 return parser

@@ -80,9 +79,9 @@ def main(iargs=None):
 outputDir = None
 rmfile = inps.rmfile
-# inputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/download/'
+# inputDirs = r'/mnt/e/MicroWorkspace/HJ2-Deformation/download/'
 # inputDir = os.path.abspath(inputDirs)
-# outputDirs = r'/mnt/e/MicroWorkspace/GF3-Deformation/SLC'
+# outputDirs = r'/mnt/e/MicroWorkspace/HJ2-Deformation/SLC'
 # outputDir = os.path.abspath(outputDirs)
 # rmfile = False

@@ -167,43 +166,23 @@ def main(iargs=None):
 print('Failed: ' + LT1AB_folder)
-if inps.linux:
-# now generate the unpacking script for all the date dirs
-dateDirs = glob.glob(os.path.join(inputDir,'2*'))
-if outputDir is not None:
-f = open(run_unPack,'w')
-for dateDir in dateDirs:
-LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tiff'))
-if len(LT1ABFiles) <= 0:
-LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tif'))
-if len(LT1ABFiles)>0:
-acquisitionDate = os.path.basename(dateDir)
-slcDir = os.path.join(outputDir, acquisitionDate)
-os.makedirs(slcDir, exist_ok=True)
-print("unpackFrame_LT1AB ...")
-unpackFrame_LT1AB.mainUnpackFrame(os.path.abspath(dateDir),slcDir)
-print("unpackFrame_LT1AB finish!!!")
-f.close()
-else:
-# now generate the unpacking script for all the date dirs
-dateDirs = glob.glob(os.path.join(inputDir,'2*'))
-if outputDir is not None:
-f = open(run_unPack,'w')
-for dateDir in dateDirs:
-LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tiff'))
-if len(LT1ABFiles) <= 0:
-LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tif'))
-if len(LT1ABFiles)>0:
-acquisitionDate = os.path.basename(dateDir)
-slcDir = os.path.join(outputDir, acquisitionDate)
-os.makedirs(slcDir, exist_ok=True)
-cmd = 'unpackFrame_LT1AB.exe -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
-result = os.system(cmd)
-# f.write(inps.text_cmd + cmd+'\n')
-print(cmd, result)
-f.write(cmd+'\n')
-f.close()
+# now generate the unpacking script for all the date dirs
+dateDirs = glob.glob(os.path.join(inputDir,'2*'))
+if outputDir is not None:
+f = open(run_unPack,'w')
+for dateDir in dateDirs:
+LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tiff'))
+if len(LT1ABFiles) <= 0:
+LT1ABFiles = glob.glob(os.path.join(dateDir, 'LT1*.tif'))
+if len(LT1ABFiles)>0:
+acquisitionDate = os.path.basename(dateDir)
+slcDir = os.path.join(outputDir, acquisitionDate)
+os.makedirs(slcDir, exist_ok=True)
+cmd = 'unpackFrame_LT1AB.py -i ' + os.path.abspath(dateDir) + ' -o ' + slcDir
+# result = os.system(cmd)
+# print(cmd, result)
+f.write(cmd+'\n')
+f.close()
 if __name__ == '__main__':
 main()
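With the platform branch removed, the script now always writes one 'unpackFrame_LT1AB.py -i ... -o ...' command per date into the run file instead of unpacking in-process. A hedged sketch of executing such a run file afterwards (the path is a placeholder):

    import subprocess

    with open('run_unPack') as f:               # placeholder for the generated run-file path
        for cmd in (line.strip() for line in f):
            if cmd:
                subprocess.run(cmd, shell=True, check=True)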


@@ -76,9 +76,9 @@ def main(iargs=None):
 rmfile = inps.rmfile
 # filename of the runfile
-run_unPack = 'run_unPackRSAT2'
+run_unPack = os.path.join(inputDir, 'run_unPackRS2.txt')
+print('开始解压')
 # loop over the different folder, RSAT2 zip/tar files and unzip them, make the names consistent
 RSAT2_extensions = (os.path.join(inputDir, 'RS2*SLC*.zip'),os.path.join(inputDir, 'RS2*SLC*.tar'),os.path.join(inputDir, 'RS2*SLC*.gz'))
 for RSAT2_extension in RSAT2_extensions:

@@ -160,7 +160,7 @@ def main(iargs=None):
 if outputDir is not None:
 f = open(run_unPack,'w')
 for dateDir in dateDirs:
-RSAT2Files = glob.glob(os.path.join(dateDir, 'imagery_HH.tif'))
+RSAT2Files = glob.glob(os.path.join(dateDir, 'imagery_*.tif'))
 if len(RSAT2Files)>0:
 acquisitionDate = os.path.basename(dateDir)
 slcDir = os.path.join(outputDir, acquisitionDate)


@@ -65,14 +65,15 @@ def main(iargs=None):
 RSAT2_str2 = 'imagery_HH.tif' # RSAT2 extracted files
 TSX_TDX_str = 'dims_op*' # TSX zip files
 TSX_TDX_str2 = 'T*X*.xml' # TSX extracted files
-GF3_str = 'GF3*'
-GF3_str2 = 'GF3*.meta.xml'
+GF3_str = 'GF3*' # GF3
+GF3_str2 = 'GF3*.meta.xml' # GF3 meta xml
+HJ2_str = 'HJ2*' # HJ2
+HJ2_str2 = 'HJ2*.xml' # HJ2 meta xml
 # combine together
-sensor_str_list = (ENV_str,ALOS1_str,CSK_str,CSK_str2,RSAT2_str,RSAT2_str2,TSX_TDX_str,TSX_TDX_str2,GF3_str,GF3_str2)
-sensor_list = ('Envisat','ALOS1','CSK','CSK','RSAT2','RSAT2','TSX/TDX','TSX/TDX','GF3','GF3')
-sensor_unpackcommand = ('TODO','TODO','TODO','TODO','prepSlcRSAT2.py','prepSlcRSAT2.py','TODO','TODO','prepSlcGF3.exe','prepSlcGF3.exe')
+sensor_str_list = (ENV_str,ALOS1_str,CSK_str,CSK_str2,RSAT2_str,RSAT2_str2,TSX_TDX_str,TSX_TDX_str2,GF3_str,GF3_str2,HJ2_str,HJ2_str2)
+sensor_list = ('Envisat','ALOS1','CSK','CSK','RSAT2','RSAT2','TSX/TDX','TSX/TDX','GF3','GF3','HJ2','HJ2')
+sensor_unpackcommand = ('TODO','TODO','TODO','TODO','prepSlcRSAT2.py','prepSlcRSAT2.py','TODO','TODO','prepSlcGF3.py','prepSlcGF3.py','prepSlcHJ2.py','prepSlcHJ2.py')
 Sensors = dict(zip(sensor_str_list,sensor_list))
 Sensors_unpack = dict(zip(sensor_str_list,sensor_unpackcommand))
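The parallel tuples above are zipped into pattern-to-sensor and pattern-to-unpack-script lookups. A hedged sketch of how a file name is matched against them (detect_sensor is an illustrative helper and the sample name is made up):

    import fnmatch
    import os

    def detect_sensor(path, Sensors, Sensors_unpack):
        name = os.path.basename(path)
        for pattern, sensor in Sensors.items():
            if fnmatch.fnmatch(name, os.path.basename(pattern)):
                return sensor, Sensors_unpack[pattern]
        return None, None

    # e.g. detect_sensor('/data/HJ2A_example.xml', Sensors, Sensors_unpack) -> ('HJ2', 'prepSlcHJ2.py')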


@@ -243,7 +243,7 @@ def interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDate
 # an interferogram stack without ionosphere correction.
 # coregistration is with geometry + const offset
-i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=False)
+i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=True)
 i+=1
 runObj = run()

@@ -356,7 +356,6 @@ def main(iargs=None):
 #############################
 if inps.workflow == 'slc':
-print("SLC")
 slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=False)
 elif inps.workflow == 'interferogram':


@@ -78,7 +78,7 @@ def unpack(RSATdir, slcname):
 ####Save required metadata for further use
 ####All data is output to a shelve file
 pickName = os.path.join(slcname, 'data')
-with shelve.open(pickName, "wc") as db:
+with shelve.open(pickName) as db:
 db['frame'] = obj.frame
 db['doppler'] = poly
 db['fmrate'] = fpoly
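For context on the simplified call above: shelve.open accepts the dbm-style flags 'r', 'w', 'c' and 'n' and defaults to 'c' (open read/write, create if missing); "wc" is not one of the documented values. A small sketch of the default behaviour:

    import shelve

    with shelve.open('data') as db:        # flag defaults to 'c': create if missing, read/write
        db['frame'] = {'sensor': 'HJ2'}    # placeholder payload

    with shelve.open('data', flag='r') as db:
        print(db['frame'])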


@@ -9,7 +9,6 @@ import datetime
 from isce.components.isceobj.Util import Poly1D
 from isce.components.isceobj.Planet.AstronomicalHandbook import Const
 from isce.components.isceobj.Util.decorators import use_api
-from imageMath import IML
 from isceobj.Orbit import Orbit
 from isceobj.Util.Poly2D import Poly2D
 from isceobj.Planet.Planet import Planet

@@ -63,8 +62,6 @@ def unpack(RSATdir, slcname):
 ####Save the doppler polynomial
 ####CEOS already provides doppler polynomial
 ####as a function of range pixel