various bugfixes for topsStack (#500)

* stackSentinel: add compute arg group and simplify main()

+ createParser(): add a compute argument sub-group for a more organized help message

+ main():
   - remove the unused inps.workflow value checking, as it is already covered by argparse
   - move the updated-stack date list checking inside selectNeighborPairs()

* topsStack/mergeBursts: bugfix for float outY/XSize and a gdal version issue

+ mergeBursts.py: fix the gdal call when outY/XSize is not an integer by:
   - casting width/height to integers
   - adding the srcWin argument

+ mergeBursts.py: repeat the gdal.Translate call as a workaround for the following gdal issue:
   - ERROR 1: Input and output dataset sizes or band counts do not match in GDALDatasetCopyWholeRaster()
   - the issue occurs with gdal 3.3.3 and 3.4, but not with 2.3
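For reference, a minimal sketch of the integer-size fix (hypothetical file names; the alks/rlks values are only examples):

```python
from osgeo import gdal

alks, rlks = 3, 9  # number of looks in azimuth / range (example values)
ds = gdal.Open('input.int.vrt', gdal.GA_ReadOnly)

# force integer output dimensions, then crop the source window to an exact
# multiple of the look counts so input and output sizes always match
out_x = int(ds.RasterXSize / rlks)
out_y = int(ds.RasterYSize / alks)
opts = f'-of ENVI -outsize {out_x} {out_y} -srcwin 0 0 {out_x * rlks} {out_y * alks}'
gdal.Translate('input.3alks_9rlks.int', ds, options=opts)
ds = None
```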

+ unwrap: fix inconsistent indentation

+ invertIon: print out the number of pairs/dates

+ stackSentinel:
   - sort orbit dates for downloading
   - add missing stackReferenceDate arg to selectNeighborPairs() for update mode

+ README: add link to stack processor readme for quick navigation.

* bugfix by removing existing *.hdr files

+ remove existing *.hdr files to avoid the gdal error, instead of repeating the gdal call
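A minimal sketch of that cleanup, mirroring the multilook() change below (the output file name is hypothetical):

```python
import glob
import os

outname = 'merged/filt_topophase.flat'  # hypothetical output file

# remove stale ENVI *.hdr files so gdal does not pick up mismatched
# raster sizes from a previous (differently sized) run
fbase = os.path.splitext(outname)[0]
for fname in glob.glob(f'{fbase}*.hdr'):
    os.remove(fname)
```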

+ replace the gdal_translate command-line call with gdal.Translate() in Python.

+ add progress_cb() from Stack Overflow to print a progress bar in the gdal.Translate() call
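For reference, a hedged sketch of wiring such a callback into gdal.Translate() (the three-argument callback signature is required by GDAL; file names are hypothetical):

```python
from osgeo import gdal

def progress_cb(complete, message, cb_data):
    # GDAL invokes this repeatedly with `complete` in [0, 1]
    print(f'{complete * 100:.0f}', end=cb_data, flush=True)

gdal.Translate('out.img', 'in.vrt', options='-of ENVI',
               callback=progress_cb, callback_data='.')
```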
Zhang Yunjun 2022-06-17 11:00:13 -07:00 committed by GitHub
parent 2bb3f0805d
commit 31bb73ba95
5 changed files with 130 additions and 135 deletions


@@ -42,6 +42,7 @@ TERRASARX, UAVSAR and SAOCOM1A.
- [Running ISCE from the command line](#running-isce-from-the-command-line)
- [Running ISCE in the Python interpreter](#running-isce-in-the-python-interpreter)
- [Running ISCE with steps](#running-isce-with-steps)
- [Running ISCE stack processors](./contrib/stack/README.md)
- [Notes on Digital Elevation Models (DEMs)](#notes-on-digital-elevation-models)
4. [Input Files](#input-files)
5. [Component Configurability](#component-configurability)
@@ -515,6 +516,8 @@ this mode of interacting with the pickle object to discover much about
the workflow states and also to edit the state to see its effect
on a subsequent run with \-\-dostep or \-\-start.
### Running [ISCE stack processors](./contrib/stack/README.md)
### Notes on Digital Elevation Models
- ISCE will automatically download SRTM Digital Elevation Models when you run an


@@ -192,10 +192,10 @@ if __name__ == '__main__':
else:
dateZero = dates[0]
print('all pairs:\n{}'.format(' '.join(pairsAll)))
print('all dates:\n{}'.format(' '.join(datesAll)))
print('used pairs:\n{}'.format(' '.join(pairs)))
print('used dates:\n{}'.format(' '.join(dates)))
print(f'all pairs ({len(pairsAll)}):\n{pairsAll}')
print(f'all dates ({len(datesAll)}):\n{datesAll}')
print(f'used pairs ({len(pairs)}):\n{pairs}')
print(f'used dates ({len(dates)}):\n{dates}')
####################################################################################


@@ -11,6 +11,7 @@ import datetime
import logging
import argparse
import numpy as np
from osgeo import gdal
import isce
import isceobj
@@ -283,51 +284,43 @@ def multilook(infile, outname=None, alks=5, rlks=15, multilook_tool="isce", no_d
Take looks.
'''
# default output filename
if outname is None:
spl = os.path.splitext(infile)
ext = '.{0}alks_{1}rlks'.format(alks, rlks)
outname = spl[0] + ext + spl[1]
if multilook_tool=="gdal":
# remove existing *.hdr files, to avoid the following gdal error:
# ERROR 1: Input and output dataset sizes or band counts do not match in GDALDatasetCopyWholeRaster()
fbase = os.path.splitext(outname)[0]
print(f'remove {fbase}*.hdr')
for fname in glob.glob(f'{fbase}*.hdr'):
os.remove(fname)
from osgeo import gdal
print("multi looking using gdal ...")
if outname is None:
spl = os.path.splitext(infile)
ext = '.{0}alks_{1}rlks'.format(alks, rlks)
outname = spl[0] + ext + spl[1]
print(infile)
ds = gdal.Open(infile + ".vrt", gdal.GA_ReadOnly)
print(f"multilooking {rlks} x {alks} using gdal for {infile} ...")
ds = gdal.Open(infile+'.vrt', gdal.GA_ReadOnly)
xSize = ds.RasterXSize
ySize = ds.RasterYSize
outXSize = int(xSize / int(rlks))
outYSize = int(ySize / int(alks))
srcXSize = outXSize * int(rlks)
srcYSize = outYSize * int(alks)
outXSize = xSize/int(rlks)
outYSize = ySize/int(alks)
if no_data:
gdalTranslateOpts = gdal.TranslateOptions(format="ENVI", width=outXSize, height=outYSize, noData=no_data)
else:
gdalTranslateOpts = gdal.TranslateOptions(format="ENVI", width=outXSize, height=outYSize)
gdal.Translate(outname, ds, options=gdalTranslateOpts)
ds = None
ds = gdal.Open(outname, gdal.GA_ReadOnly)
gdal.Translate(outname+".vrt", ds, options=gdal.TranslateOptions(format="VRT"))
ds = None
options_str = f'-of ENVI -outsize {outXSize} {outYSize} -srcwin 0 0 {srcXSize} {srcYSize} '
options_str += f'-a_nodata {no_data}' if no_data else ''
gdal.Translate(outname, ds, options=options_str)
# generate VRT file
gdal.Translate(outname+".vrt", outname, options='-of VRT')
else:
from mroipac.looks.Looks import Looks
print('Multilooking {0} ...'.format(infile))
print(f'multilooking {rlks} x {alks} using isce2 for {infile} ...')
inimg = isceobj.createImage()
inimg.load(infile + '.xml')
if outname is None:
spl = os.path.splitext(inimg.filename)
ext = '.{0}alks_{1}rlks'.format(alks, rlks)
outname = spl[0] + ext + spl[1]
lkObj = Looks()
lkObj.setDownLooks(alks)
lkObj.setAcrossLooks(rlks)
@@ -338,6 +331,20 @@ def multilook(infile, outname=None, alks=5, rlks=15, multilook_tool="isce", no_d
return outname
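A hedged usage sketch of the updated multilook() (hypothetical input file; with these look counts the default output name becomes '<base>.3alks_9rlks<ext>'):

```python
# take 3 azimuth x 9 range looks via the gdal branch; stale *.hdr files
# are removed first, so rerunning over existing output is safe
out = multilook('merged/filt_topophase.flat', alks=3, rlks=9,
                multilook_tool='gdal')
```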
def progress_cb(complete, message, cb_data):
'''Emit progress report in numbers for 10% intervals and dots for 3%
Link: https://stackoverflow.com/questions/68025043/adding-a-progress-bar-to-gdal-translate
'''
if int(complete*100) % 10 == 0:
msg = f'{complete*100:.0f}'
print(msg, end='', flush=True)
if msg == '100':
print(' ')
elif int(complete*100) % 3 == 0:
print(f'{cb_data}', end='', flush=True)
return
def main(iargs=None):
'''
@@ -408,11 +415,12 @@ def main(iargs=None):
mergeBurstsVirtual(frames, referenceFrames, fileList, inps.outfile+suffix, validOnly=inps.validOnly)
if (not virtual):
print('writing merged file to disk ...')
cmd = 'gdal_translate -of ENVI -co INTERLEAVE=BIL ' + inps.outfile + suffix + '.vrt ' + inps.outfile + suffix
os.system(cmd)
print('writing merged file to disk via gdal.Translate ...')
gdal.Translate(inps.outfile+suffix, inps.outfile+suffix+'.vrt',
options='-of ENVI -co INTERLEAVE=BIL',
callback=progress_cb,
callback_data='.')
print(inps.multilook)
if inps.multilook:
multilook(inps.outfile+suffix,
outname=inps.outfile,


@ -17,7 +17,7 @@ from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1
from Stack import config, run, sentinelSLC
helpstr= '''
helpstr = """
Stack processor for Sentinel-1 data using ISCE software.
@@ -73,7 +73,7 @@ stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem
For all workflows, coregistration can be done using only geometry or with geometry plus refined azimuth offsets through NESD approach.
Existing workflows: slc, interferogram, correlation, offset
'''
"""
class customArgparseAction(argparse.Action):
def __call__(self, parser, args, values, option_string=None):
@@ -85,7 +85,7 @@ class customArgparseAction(argparse.Action):
def createParser():
parser = argparse.ArgumentParser( description='Preparing the directory structure and config files for stack processing of Sentinel data')
parser = argparse.ArgumentParser(description='Preparing the directory structure and config files for stack processing of Sentinel data')
parser.add_argument('-H','--hh', nargs=0, action=customArgparseAction,
help='Display detailed help information.')
@@ -117,37 +117,33 @@ def createParser():
parser.add_argument('-b', '--bbox', dest='bbox', type=str, default=None,
help="Lat/Lon Bounding SNWE. -- Example : '19 20 -99.5 -98.5' -- Default : common overlap between stack")
parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='',
help="text command to be added to the beginning of each line of the run files (default: '%(default)s'). "
"Example : 'source ~/.bash_profile;'")
parser.add_argument('-x', '--exclude_dates', dest='exclude_dates', type=str, default=None,
parser.add_argument('-x', '--exclude_dates', dest='exclude_dates', type=str, default=None,
help="List of the dates to be excluded for processing. -- Example : '20141007,20141031' (default: %(default)s).")
parser.add_argument('-i', '--include_dates', dest='include_dates', type=str, default=None,
parser.add_argument('-i', '--include_dates', dest='include_dates', type=str, default=None,
help="List of the dates to be included for processing. -- Example : '20141007,20141031' (default: %(default)s).")
parser.add_argument('--start_date', dest='startDate', type=str, default=None,
parser.add_argument('--start_date', dest='startDate', type=str, default=None,
help='Start date for stack processing. Acquisitions before start date are ignored. '
'format should be YYYY-MM-DD e.g., 2015-01-23')
parser.add_argument('--stop_date', dest='stopDate', type=str, default=None,
parser.add_argument('--stop_date', dest='stopDate', type=str, default=None,
help='Stop date for stack processing. Acquisitions after stop date are ignored. '
'format should be YYYY-MM-DD e.g., 2017-02-26')
parser.add_argument('-z', '--azimuth_looks', dest='azimuthLooks', type=str, default='3',
parser.add_argument('-z', '--azimuth_looks', dest='azimuthLooks', type=str, default='3',
help='Number of looks in azimuth for interferogram multi-looking (default: %(default)s).')
parser.add_argument('-r', '--range_looks', dest='rangeLooks', type=str, default='9',
parser.add_argument('-r', '--range_looks', dest='rangeLooks', type=str, default='9',
help='Number of looks in range for interferogram multi-looking (default: %(default)s).')
parser.add_argument('-f', '--filter_strength', dest='filtStrength', type=str, default='0.5',
parser.add_argument('-f', '--filter_strength', dest='filtStrength', type=str, default='0.5',
help='Filter strength for interferogram filtering (default: %(default)s).')
parser.add_argument('--snr_misreg_threshold', dest='snrThreshold', type=str, default='10',
parser.add_argument('--snr_misreg_threshold', dest='snrThreshold', type=str, default='10',
help='SNR threshold for estimating range misregistration using cross correlation (default: %(default)s).')
parser.add_argument('-p', '--polarization', dest='polarization', type=str, default='vv',
parser.add_argument('-p', '--polarization', dest='polarization', type=str, default='vv',
help='SAR data polarization (default: %(default)s).')
parser.add_argument('-C', '--coregistration', dest='coregistration', type=str, default='NESD', choices=['geometry', 'NESD'],
@@ -157,39 +153,46 @@ def createParser():
help='number of overlap interferograms between each date and subsequent dates used for NESD computation '
'(for azimuth offsets misregistration) (default: %(default)s).')
parser.add_argument('-e', '--esd_coherence_threshold', dest='esdCoherenceThreshold', type=str, default='0.85',
parser.add_argument('-e', '--esd_coherence_threshold', dest='esdCoherenceThreshold', type=str, default='0.85',
help='Coherence threshold for estimating azimuth misregistration using enhanced spectral diversity (default: %(default)s).')
parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='interferogram',
choices=['slc', 'correlation', 'interferogram', 'offset'],
help='The InSAR processing workflow (default: %(default)s).')
parser.add_argument('-V', '--virtual_merge', dest='virtualMerge', type=str, default=None, choices=['True', 'False'],
help='Use virtual files for the merged SLCs and geometry files.\n'
'Default: True for correlation / interferogram workflow\n'
' False for slc / offset workflow')
parser.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False,
help='Allow App to use GPU when available')
parser.add_argument('--num_proc', '--num_process', dest='numProcess', type=int, default=1,
help='number of tasks running in parallel in each run file (default: %(default)s).')
parser.add_argument('--num_proc4topo', '--num_process4topo', dest='numProcess4topo', type=int, default=1,
help='number of parallel processes (for topo only) (default: %(default)s).')
# unwrap
parser.add_argument('-u', '--unw_method', dest='unwMethod', type=str, default='snaphu', choices=['icu', 'snaphu'],
help='Unwrapping method (default: %(default)s).')
parser.add_argument('-rmFilter', '--rmFilter', dest='rmFilter', action='store_true', default=False,
help='Make an extra unwrap file in which filtering effect is removed')
parser.add_argument('--param_ion', dest='param_ion', type=str, default=None,
# ionospheric correction
parser.add_argument('--param_ion', dest='param_ion', type=str, default=None,
help='ionosphere estimation parameter file. if provided, will do ionosphere estimation.')
parser.add_argument('--num_connections_ion', dest='num_connections_ion', type=str, default = '3',
help='number of interferograms between each date and subsequent dates for ionosphere estimation (default: %(default)s).')
# computing
compute = parser.add_argument_group('Computing options')
compute.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False,
help='Allow App to use GPU when available')
compute.add_argument('--num_proc', '--num_process', dest='numProcess', type=int, default=1,
help='number of tasks running in parallel in each run file (default: %(default)s).')
compute.add_argument('--num_proc4topo', '--num_process4topo', dest='numProcess4topo', type=int, default=1,
help='number of parallel processes (for topo only) (default: %(default)s).')
compute.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='',
help="text command to be added to the beginning of each line of the run files (default: '%(default)s'). "
"Example : 'source ~/.bash_profile;'")
compute.add_argument('-V', '--virtual_merge', dest='virtualMerge', type=str, default=None, choices=['True', 'False'],
help='Use virtual files for the merged SLCs and geometry files.\n'
'Default: True for correlation / interferogram workflow\n'
' False for slc / offset workflow')
return parser
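For reference, a standalone toy example (not the stack parser itself) of how an argparse argument group organizes the help printout:

```python
import argparse

parser = argparse.ArgumentParser(description='toy example')
compute = parser.add_argument_group('Computing options')
compute.add_argument('--num_proc', type=int, default=1,
                     help='number of parallel tasks (default: %(default)s).')
# the option now appears under its own 'Computing options:' heading
parser.print_help()
```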
@@ -213,7 +216,7 @@ def cmdLineParse(iargs = None):
def generate_geopolygon(bbox):
"""generate shapely Polygon"""
from shapely.geometry import Point, Polygon
# convert pnts to shapely polygon format
# the order of pnts is counter-clockwise, starting from the lower left corner
# the order for Point is lon,lat
@@ -221,6 +224,7 @@ def generate_geopolygon(bbox):
return Polygon([(p.coords.xy[0][0], p.coords.xy[1][0]) for p in points])
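A hypothetical usage sketch, assuming bbox is the 4-element SNWE list parsed from the -b/--bbox option:

```python
# SNWE order, matching the '-b/--bbox' argument: S N W E
poly = generate_geopolygon([19, 20, -99.5, -98.5])
print(poly.bounds)  # (min lon, min lat, max lon, max lat)
```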
####################################
def get_dates(inps):
# Given the SLC directory, this function extracts the acquisition dates
@@ -249,9 +253,9 @@ def get_dates(inps):
SAFE_files.append(str.replace(line,'\n','').strip())
else:
SAFE_files = glob.glob(os.path.join(inps.slc_dirname,'S1*_IW_SLC*zip')) # changed to zip file by Minyan Zhong
SAFE_files = sorted(glob.glob(os.path.join(inps.slc_dirname, 'S1*_IW_SLC*zip'))) # changed to zip file by Minyan Zhong
if SAFE_files == []:
SAFE_files = glob.glob(os.path.join(inps.slc_dirname,'S1*_IW_SLC*SAFE'))
SAFE_files = sorted(glob.glob(os.path.join(inps.slc_dirname, 'S1*_IW_SLC*SAFE')))
if len(SAFE_files) == 0:
raise Exception('No SAFE file found')
@@ -429,19 +433,32 @@ def get_dates(inps):
return dateList, inps.reference_date, secondaryList, safe_dict
def selectNeighborPairs(dateList, num_connections, updateStack=False): # should be changed to able to account for the existed aquisitions -- Minyan Zhong
def selectNeighborPairs(dateList, stackReferenceDate, secondaryDates, num_connections, updateStack=False):
"""Select nearest neighbor acquisitions to form seqential pairs."""
pairs = []
if updateStack:
# use the secondaryDates (new acquisitions), instead of the entire list of dates
print('\nUpdating an existing stack ...\n')
# include the reference date for pairing if it is among the most recent acquisitions
dateList = sorted(secondaryDates + [stackReferenceDate])[1:]
num_date = len(dateList)
# translate num_connections input
if num_connections == 'all':
num_connections = len(dateList) - 1
else:
num_connections = int(num_connections)
# selecting nearest pairs based on dateList and num_connections
num_connections = num_connections + 1
for i in range(len(dateList)-1):
for j in range(i+1,i + num_connections):
if j<len(dateList):
pairs.append((dateList[i],dateList[j]))
for i in range(num_date-1):
for j in range(i+1, i+num_connections):
if j < num_date:
pairs.append((dateList[i], dateList[j]))
print('selecting pairs with {} nearest neighbor connections: {}'.format(num_connections-1, len(pairs)))
return pairs
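As an illustration of the pairing logic above (hypothetical dates): three acquisitions with num_connections=1 yield the two sequential pairs; the reference/secondary arguments are only consulted when updateStack is True.

```python
dates = ['20220101', '20220113', '20220125']
pairs = selectNeighborPairs(dates, None, None, num_connections=1, updateStack=False)
print(pairs)  # [('20220101', '20220113'), ('20220113', '20220125')]
```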
@@ -474,7 +491,7 @@ def selectNeighborPairsIonosphere(safe_dict, num_connections):
if starting_ranges[i] not in starting_ranges_unique:
starting_ranges_unique.append(starting_ranges[i])
ndate_unique = len(starting_ranges_unique)
#put dates of same starting ranges in a list
#result is a 2-D list, each D is sorted by date
starting_ranges_unique_dates = [[] for i in range(ndate_unique)]
@@ -850,12 +867,9 @@ def checkCurrentStatus(inps):
coregSLC.sort()
if len(coregSLC)>0:
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('')
print('An existing stack with following coregistered SLCs was found:')
print('\nAn existing stack with the following coregistered SLCs was found:')
print(coregSLC)
print('')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
else:
pass
@@ -863,11 +877,9 @@ def checkCurrentStatus(inps):
newAcquisitions.sort()
if len(newAcquisitions)>0:
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('')
print('New acquisitions was found: ')
print('\nNew acquisitions were found: ')
print(newAcquisitions)
print('')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
else:
print(' ********************************* ')
print(' ***************** ')
@@ -883,7 +895,6 @@
if inps.coregistration in ['NESD','nesd']:
numSLCReprocess = 2*int(inps.num_overlap_connections)
if numSLCReprocess > len(secondaryDates):
numSLCReprocess = len(secondaryDates)
@@ -894,7 +905,6 @@
raise Exception('The original SAFE files for the latest {0} coregistered SLCs are needed'.format(numSLCReprocess))
else: # add by Minyan Zhong, should be changed later as numSLCReprocess should be 0
numSLCReprocess = int(inps.num_connections)
if numSLCReprocess > len(secondaryDates):
numSLCReprocess = len(secondaryDates)
@@ -905,7 +915,6 @@
raise Exception('The original SAFE files for the latest {0} coregistered SLCs are needed'.format(numSLCReprocess))
print ('Last {0} coregistered SLCs to be updated: '.format(numSLCReprocess), latestCoregSLCs)
secondaryDates = latestCoregSLCs + newAcquisitions
secondaryDates.sort()
@@ -941,6 +950,7 @@
return acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, stackUpdate
def main(iargs=None):
inps = cmdLineParse(iargs)
@@ -955,29 +965,11 @@
print('**************************')
sys.exit(1)
if inps.workflow not in ['interferogram', 'offset', 'correlation', 'slc']:
print('')
print('**************************')
print('Error: workflow ', inps.workflow, ' is not valid.')
print('Please choose one of these workflows: interferogram, offset, correlation, slc')
print('Use argument "-W" or "--workflow" to choose a specific workflow.')
print('**************************')
print('')
sys.exit(1)
acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack = checkCurrentStatus(inps)
if updateStack:
print('')
print('Updating an existing stack ...')
print('')
dates4NewPairs = sorted(secondaryDates + [stackReferenceDate])[1:]
pairs = selectNeighborPairs(dates4NewPairs, inps.num_connections,updateStack) # will be change later
else:
pairs = selectNeighborPairs(acquisitionDates, inps.num_connections,updateStack)
# selecting pairs for interferograms / correlation / offset workflows
if inps.workflow != 'slc':
pairs = selectNeighborPairs(acquisitionDates, stackReferenceDate, secondaryDates, inps.num_connections, updateStack)
print ('*****************************************')
print ('Coregistration method: ', inps.coregistration )
@@ -1007,6 +999,5 @@
if __name__ == "__main__":
# Main engine
main(sys.argv[1:])


@@ -211,7 +211,7 @@ def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = Non
outImage.setWidth(width)
outImage.setLength(length)
outImage.setAccessMode('read')
# outImage.createImage()
# outImage.createImage()
outImage.renderHdr()
outImage.renderVRT()
#outImage.finalizeImage()
@@ -225,10 +225,10 @@ def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = Non
connImage.setLength(length)
connImage.setAccessMode('read')
connImage.setDataType('BYTE')
# connImage.createImage()
# connImage.createImage()
connImage.renderHdr()
connImage.renderVRT()
# connImage.finalizeImage()
# connImage.finalizeImage()
return
@@ -241,15 +241,9 @@ def runUnwrapMcf(infile, outfile, corfile, config, defomax=2):
def runUnwrapIcu(infile, outfile):
from mroipac.icu.Icu import Icu
#Setup images
#ampImage
# ampImage = obj.insar.resampAmpImage.copy(access_mode='read')
# width = self.ampImage.getWidth()
img = isceobj.createImage()
img.load(infile + '.xml')
width = img.getWidth()
width = img.getWidth()
#intImage
intImage = isceobj.createIntImage()
@@ -329,21 +323,20 @@ def main(iargs=None):
else:
fncall = runUnwrapMcf
swathList = ut.getSwathList(inps.reference)
#metadata = extractInfo(inps.reference+'.xml', inps)
xmlFile = os.path.join(inps.reference , 'IW{0}.xml'.format(swathList[0]))
metadata = extractInfo(xmlFile, inps)
fncall(inps.intfile, inps.unwfile, inps.cohfile, metadata, defomax=inps.defomax)
#mask out weird values from snaphu
intImage = isceobj.createImage()
intImage.load(inps.intfile+'.xml')
width = intImage.width
length = intImage.length
#mask out weird values from snaphu
intImage = isceobj.createImage()
intImage.load(inps.intfile+'.xml')
width = intImage.width
length = intImage.length
flag = np.fromfile(inps.intfile, dtype=np.complex64).reshape(length, width)
unw=np.memmap(inps.unwfile, dtype='float32', mode='r+', shape=(length*2, width))
(unw[0:length*2:2, :])[np.nonzero(flag==0)]=0
(unw[1:length*2:2, :])[np.nonzero(flag==0)]=0
flag = np.fromfile(inps.intfile, dtype=np.complex64).reshape(length, width)
unw=np.memmap(inps.unwfile, dtype='float32', mode='r+', shape=(length*2, width))
(unw[0:length*2:2, :])[np.nonzero(flag==0)]=0
(unw[1:length*2:2, :])[np.nonzero(flag==0)]=0
elif inps.method == 'icu':
runUnwrapIcu(inps.intfile, inps.unwfile)
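For context, a commented sketch of the snaphu masking block above, assuming the .unw raster is two-band BIL (magnitude and unwrapped phase interleaved line by line, as ISCE writes it) and using hypothetical file names and dimensions:

```python
import numpy as np

length, width = 1000, 2000  # hypothetical raster dimensions

# pixels that are zero in the wrapped interferogram carry no signal
flag = np.fromfile('filt.int', dtype=np.complex64).reshape(length, width)

# even rows hold magnitude, odd rows hold phase in the BIL layout
unw = np.memmap('filt.unw', dtype='float32', mode='r+', shape=(length * 2, width))
unw[0:length * 2:2, :][flag == 0] = 0  # zero out magnitude at empty pixels
unw[1:length * 2:2, :][flag == 0] = 0  # zero out phase at empty pixels
unw.flush()
```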