Merge pull request #276 from yunjunz/offset
cuDenseOffsets.prep*Geom*: supp. full-reso file w/o .full in the name
commit a976d12c51

@@ -40,6 +40,8 @@ def createParser():
             help='Reference image')
     parser.add_argument('-s', '--secondary',type=str, dest='secondary', required=True,
             help='Secondary image')
+    parser.add_argument('--fix-xml','--fix-image-xml', dest='fixImageXml', action='store_true',
+            help='Fix the image file path in the XML file. Enable this if input files have been moved.')
 
     parser.add_argument('--op','--outprefix','--output-prefix', type=str, dest='outprefix',
             default='offset', required=True,

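The new switch is a plain argparse boolean flag: it stays False unless --fix-xml (or --fix-image-xml) is passed. A minimal, self-contained sketch of the parsing behaviour, with hypothetical file names:

import argparse

parser = argparse.ArgumentParser(description='dense-offset argument parsing (sketch)')
parser.add_argument('-r', '--reference', type=str, dest='reference', required=True,
                    help='Reference image')
parser.add_argument('-s', '--secondary', type=str, dest='secondary', required=True,
                    help='Secondary image')
parser.add_argument('--fix-xml', '--fix-image-xml', dest='fixImageXml', action='store_true',
                    help='Fix the image file path in the XML file.')

# hypothetical inputs; only the flag handling is of interest here
inps = parser.parse_args(['-r', 'reference.slc.full', '-s', 'secondary.slc.full', '--fix-xml'])
print(inps.fixImageXml)   # True; without --fix-xml it stays False
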
@@ -166,12 +168,13 @@ def estimateOffsetField(reference, secondary, inps=None):
         return 0
 
     # update file path in xml file
-    for fname in [reference, secondary]:
-        fname = os.path.abspath(fname)
-        img = IML.loadImage(fname)[0]
-        img.filename = fname
-        img.setAccessMode('READ')
-        img.renderHdr()
+    if inps.fixImageXml:
+        for fname in [reference, secondary]:
+            fname = os.path.abspath(fname)
+            img = IML.loadImage(fname)[0]
+            img.filename = fname
+            img.setAccessMode('READ')
+            img.renderHdr()
 
     ###Loading the secondary image object
     sim = isceobj.createSlcImage()

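The change above makes the header rewrite opt-in: the loop that re-renders the image XML now runs only when --fix-xml is given. A rough standalone sketch of that step, assuming an ISCE-2 environment where IML refers to isceobj.Util.ImageUtil.ImageLib as in the stack scripts:

import os
from isceobj.Util.ImageUtil import ImageLib as IML   # assumption: same alias the script uses


def fix_image_xml(fnames):
    """Point the ISCE image headers at the files' current locations."""
    for fname in fnames:
        fname = os.path.abspath(fname)
        img = IML.loadImage(fname)[0]   # first element of the returned tuple is the image object
        img.filename = fname
        img.setAccessMode('READ')
        img.renderHdr()                 # re-writes the XML header next to the data file


# hypothetical paths, useful after the SLCs have been moved to a new directory:
# fix_image_xml(['merged/SLC/20200101/20200101.slc.full',
#                'merged/SLC/20200201/20200201.slc.full'])
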
@@ -382,14 +385,24 @@ def prepareGeometry(full_dir, out_dir, x_start, y_start, x_step, y_step, num_win
     x/y_step - int, output pixel step in column/row direction
     num_win_x/y - int, number of columns/rows
     """
     full_dir = os.path.abspath(full_dir)
     out_dir = os.path.abspath(out_dir)
 
+    # grab the file extension for full resolution file
+    full_exts = ['.rdr.full','.rdr'] if full_dir != out_dir else ['.rdr.full']
+    full_exts = [e for e in full_exts if os.path.isfile(os.path.join(full_dir, '{f}{e}'.format(f=fbases[0], e=e)))]
+    if len(full_exts) == 0:
+        raise ValueError('No full resolution {}.rdr* file found in: {}'.format(fbases[0], full_dir))
+    full_ext = full_exts[0]
+
     print('-'*50)
     print('generate the corresponding multi-looked geometry datasets using gdal ...')
-    in_files = [os.path.join(full_dir, '{}.rdr.full'.format(i)) for i in fbases]
+
+    # input files
+    in_files = [os.path.join(full_dir, '{f}{e}'.format(f=f, e=full_ext)) for f in fbases]
     in_files = [i for i in in_files if os.path.isfile(i)]
+    if len(in_files) == 0:
+        raise ValueError('No full resolution geometry file found in: {}'.format(full_dir))
 
     fbases = [os.path.basename(i).split('.')[0] for i in in_files]
 
+    # output files
     out_files = [os.path.join(out_dir, '{}.rdr'.format(i)) for i in fbases]
     os.makedirs(out_dir, exist_ok=True)

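The headline change of the PR is this extension fallback: prepareGeometry now accepts full-resolution geometry files named either *.rdr.full or plain *.rdr, with the bare .rdr form allowed only when the input and output directories differ, so a multi-looked product is never mistaken for its own full-resolution source. A self-contained sketch of that selection logic, using hypothetical directory and file-base names:

import os

def pick_full_ext(full_dir, out_dir, fbase):
    """Return the extension of the full-resolution file for `fbase` in `full_dir`.

    Prefers '.rdr.full'; falls back to '.rdr' only when full_dir and out_dir differ,
    mirroring the logic added to prepareGeometry().
    """
    candidates = ['.rdr.full', '.rdr'] if full_dir != out_dir else ['.rdr.full']
    found = [e for e in candidates
             if os.path.isfile(os.path.join(full_dir, fbase + e))]
    if not found:
        raise ValueError('No full resolution {}.rdr* file found in: {}'.format(fbase, full_dir))
    return found[0]

# hypothetical layout: geom_reference/ holds lat.rdr (no .full suffix),
# and the multi-looked output goes to a separate directory
# ext = pick_full_ext('geom_reference', 'offsets/geom', 'lat')   # -> '.rdr'
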
@@ -26,6 +26,7 @@ noMCF = 'False'
 defoMax = '2'
 maxNodes = 72
 
 
 def createParser():
     parser = argparse.ArgumentParser( description='Preparing the directory structure and config files for stack processing of StripMap data')

@@ -102,6 +103,7 @@ def createParser():
     parser.add_argument('--summary', dest='summary', action='store_true', default=False, help='Show summary only')
     return parser
 
 
 def cmdLineParse(iargs = None):
     parser = createParser()
     inps = parser.parse_args(args=iargs)

@@ -136,11 +138,11 @@ def get_dates(inps):
         secondaryDates.remove(inps.referenceDate)
     return acuisitionDates, inps.referenceDate, secondaryDates
 
 
 def slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=False):
     # A coregistered stack of SLCs
     i=0
 
 
     if inps.bbox:
         i+=1
         runObj = run()

@@ -149,7 +151,6 @@ def slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs,
         runObj.crop(acquisitionDates, config_prefix, native=not inps.zerodop, israw=not inps.nofocus)
         runObj.finalize()
 
-
     i+=1
     runObj = run()
     runObj.configure(inps, 'run_{:02d}_reference'.format(i))

@@ -192,38 +193,38 @@ def slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs,
         runObj.finalize()
 
     if rubberSheet:
-        i+=1
-        runObj = run()
-        runObj.configure(inps, 'run_{:02d}_denseOffset'.format(i))
-        config_prefix = 'config_denseOffset_'
-        runObj.denseOffsets_Network(pairs, stackReferenceDate, secondaryDates, config_prefix)
-        runObj.finalize()
+        i+=1
+        runObj = run()
+        runObj.configure(inps, 'run_{:02d}_denseOffset'.format(i))
+        config_prefix = 'config_denseOffset_'
+        runObj.denseOffsets_Network(pairs, stackReferenceDate, secondaryDates, config_prefix)
+        runObj.finalize()
 
-        i+=1
-        runObj = run()
-        runObj.configure(inps, 'run_{:02d}_invertDenseOffsets'.format(i))
-        runObj.invertDenseOffsets()
-        runObj.finalize()
+        i+=1
+        runObj = run()
+        runObj.configure(inps, 'run_{:02d}_invertDenseOffsets'.format(i))
+        runObj.invertDenseOffsets()
+        runObj.finalize()
 
-        i+=1
-        runObj = run()
-        runObj.configure(inps, 'run_{:02d}_resampleOffset'.format(i))
-        config_prefix = 'config_resampOffsets_'
-        runObj.resampleOffset(secondaryDates, config_prefix)
-        runObj.finalize()
+        i+=1
+        runObj = run()
+        runObj.configure(inps, 'run_{:02d}_resampleOffset'.format(i))
+        config_prefix = 'config_resampOffsets_'
+        runObj.resampleOffset(secondaryDates, config_prefix)
+        runObj.finalize()
 
-        i+=1
-        runObj = run()
-        runObj.configure(inps, 'run_{:02d}_replaceOffsets'.format(i))
-        runObj.replaceOffsets(secondaryDates)
-        runObj.finalize()
+        i+=1
+        runObj = run()
+        runObj.configure(inps, 'run_{:02d}_replaceOffsets'.format(i))
+        runObj.replaceOffsets(secondaryDates)
+        runObj.finalize()
 
-        i+=1
-        runObj = run()
-        runObj.configure(inps, 'run_{:02d}_fineResamp'.format(i))
-        config_prefix = 'config_fineResamp_'
-        runObj.secondarys_fine_resampleSlc(stackReferenceDate, secondaryDates, config_prefix, split=splitFlag)
-        runObj.finalize()
+        i+=1
+        runObj = run()
+        runObj.configure(inps, 'run_{:02d}_fineResamp'.format(i))
+        config_prefix = 'config_fineResamp_'
+        runObj.secondarys_fine_resampleSlc(stackReferenceDate, secondaryDates, config_prefix, split=splitFlag)
+        runObj.finalize()
 
     # adding the baseline grid generation
     i+=1

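Each of these rubber-sheeting steps follows the same pattern: bump the counter, create a run object, and register a zero-padded run-file name. A tiny illustration of the naming scheme (the step names below are taken from the hunk above, the starting counter value is hypothetical):

# reproduce the run-file naming used in slcStack(): 'run_{:02d}_<step>'
steps = ['denseOffset', 'invertDenseOffsets', 'resampleOffset',
         'replaceOffsets', 'fineResamp']

i = 10  # hypothetical counter value reached before the rubber-sheeting steps
for step in steps:
    i += 1
    print('run_{:02d}_{}'.format(i, step))
# run_11_denseOffset
# run_12_invertDenseOffsets
# ...
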
@@ -235,11 +236,11 @@ def slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs,
     return i
 
 
 def interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs):
     # an interferogram stack without ionosphere correction.
     # coregistration is with geometry + const offset
 
 
     i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=False)
 
     i+=1

@@ -249,6 +250,8 @@ def interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDate
     low_or_high = "/"
     runObj.igrams_network(pairs, acquisitionDates, stackReferenceDate, low_or_high, config_prefix)
     runObj.finalize()
+    return
+
 
 
 def interferogramIonoStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs):

@@ -297,69 +300,68 @@ def interferogramIonoStack(inps, acquisitionDates, stackReferenceDate, secondary
     config_prefix = 'config_iono_'
     lowBand = '/LowBand/'
     highBand = '/HighBand/'
-    runObj.dispersive_nonDispersive(pairs, acquisitionDates, stackReferenceDate,
-                                    lowBand, highBand, config_prefix)
+    runObj.dispersive_nonDispersive(pairs, acquisitionDates, stackReferenceDate, lowBand, highBand, config_prefix)
     runObj.finalize()
     return
 
 
 def main(iargs=None):
 
-    inps = cmdLineParse(iargs)
-    # name of the folder of the coreg SLCs including baselines, SLC, geom_reference subfolders
-    inps.stack_folder = 'merged'
-    inps.dense_offsets_folder = 'dense_offsets'
+    inps = cmdLineParse(iargs)
+    # name of the folder of the coreg SLCs including baselines, SLC, geom_reference subfolders
+    inps.stack_folder = 'merged'
+    inps.dense_offsets_folder = 'dense_offsets'
 
 
-    # check if a sensor is defined and update if needed azimuth looks to give square pixels
-    ar=1
-    if inps.sensor:
-        if inps.sensor.lower() == "alos":
-            ar=4
-            print("Looks like " + inps.sensor.lower() + ", multi-look AR=" + str(ar))
-        elif inps.sensor.lower() == "envisat" or inps.sensor.lower() == "ers":
-            ar=5
-            print("Looks like " + inps.sensor.lower() + ", multi-look AR=" + str(ar))
-        else:
-            print("Sensor is not hard-coded (ers, envisat, alos), will keep default alks")
-            # sensor is not recognised, report to user and state default
-    inps.alks = str(int(inps.alks)*int(ar))
+    # check if a sensor is defined and update if needed azimuth looks to give square pixels
+    ar=1
+    if inps.sensor:
+        if inps.sensor.lower() == "alos":
+            ar=4
+            print("Looks like " + inps.sensor.lower() + ", multi-look AR=" + str(ar))
+        elif inps.sensor.lower() == "envisat" or inps.sensor.lower() == "ers":
+            ar=5
+            print("Looks like " + inps.sensor.lower() + ", multi-look AR=" + str(ar))
+        else:
+            print("Sensor is not hard-coded (ers, envisat, alos), will keep default alks")
+            # sensor is not recognised, report to user and state default
+    inps.alks = str(int(inps.alks)*int(ar))
 
-    # getting the acquisitions
-    acquisitionDates, stackReferenceDate, secondaryDates = get_dates(inps)
-    configDir = os.path.join(inps.workDir,'configs')
-    os.makedirs(configDir, exist_ok=True)
-    runDir = os.path.join(inps.workDir,'run_files')
-    os.makedirs(runDir, exist_ok=True)
+    # getting the acquisitions
+    acquisitionDates, stackReferenceDate, secondaryDates = get_dates(inps)
+    configDir = os.path.join(inps.workDir,'configs')
+    os.makedirs(configDir, exist_ok=True)
+    runDir = os.path.join(inps.workDir,'run_files')
+    os.makedirs(runDir, exist_ok=True)
 
-    if inps.sensor.lower() == 'uavsar_stack': # don't try to calculate baselines for UAVSAR_STACK data
-        pairs = selectPairs(inps,stackReferenceDate, secondaryDates, acquisitionDates,doBaselines=False)
-    else:
-        pairs = selectPairs(inps,stackReferenceDate, secondaryDates, acquisitionDates,doBaselines=True)
-    print ('number of pairs: ', len(pairs))
+    if inps.sensor and inps.sensor.lower() == 'uavsar_stack': # don't try to calculate baselines for UAVSAR_STACK data
+        pairs = selectPairs(inps,stackReferenceDate, secondaryDates, acquisitionDates,doBaselines=False)
+    else:
+        pairs = selectPairs(inps,stackReferenceDate, secondaryDates, acquisitionDates,doBaselines=True)
+    print ('number of pairs: ', len(pairs))
 
-    ###If only a summary is requested quit after this
-    if inps.summary:
-        return
+    ###If only a summary is requested quit after this
+    if inps.summary:
+        return
 
-    #if cropping is requested, then change the slc directory:
-    inps.fullFrameSlcDir = inps.slcDir
+    #if cropping is requested, then change the slc directory:
+    inps.fullFrameSlcDir = inps.slcDir
 
-    if inps.bbox:
-        inps.slcDir = inps.slcDir + "_crop"
-    #############################
+    if inps.bbox:
+        inps.slcDir = inps.slcDir + "_crop"
+    #############################
 
-    if inps.workflow == 'slc':
-        slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=False)
+    if inps.workflow == 'slc':
+        slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs, splitFlag=False, rubberSheet=False)
 
-    elif inps.workflow == 'interferogram':
-        interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs)
+    elif inps.workflow == 'interferogram':
+        interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs)
 
-    elif inps.workflow == 'ionosphere':
-        interferogramIonoStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs)
+    elif inps.workflow == 'ionosphere':
+        interferogramIonoStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, pairs)
     return
 
 
 if __name__ == "__main__":
 
-    # Main engine
-    main()
+    # Main engine
+    main(sys.argv[1:])

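One behavioural fix in this hunk is the guard on the UAVSAR check: inps.sensor can be empty when --sensor is not given (the earlier "if inps.sensor:" block already allows for that), and calling .lower() on None raises AttributeError, so the comparison is now short-circuited. A minimal, plain-Python illustration with a hypothetical value:

sensor = None  # hypothetical: the user did not pass --sensor

# old form: crashes when sensor is None
try:
    do_baselines = not (sensor.lower() == 'uavsar_stack')
except AttributeError:
    print('old check crashes when no sensor is given')

# new form: short-circuits on None/empty and falls through to the default branch
do_baselines = not (sensor and sensor.lower() == 'uavsar_stack')
print(do_baselines)   # True -> baselines are computed, as before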