LT1AB
Lijun Zhu 2022-07-14 08:42:49 -07:00
commit 55be3ff297
10 changed files with 197 additions and 193 deletions

View File

@ -42,6 +42,7 @@ TERRASARX, UAVSAR and SAOCOM1A.
- [Running ISCE from the command line](#running-isce-from-the-command-line)
- [Running ISCE in the Python interpreter](#running-isce-in-the-python-interpreter)
- [Running ISCE with steps](#running-isce-with-steps)
- [Running ISCE stack processors](./contrib/stack/README.md)
- [Notes on Digital Elevation Models (DEMs)](#notes-on-digital-elevation-models)
4. [Input Files](#input-files)
5. [Component Configurability](#component-configurability)
@ -515,6 +516,8 @@ this mode of interacting with the pickle object to discover much about
the workflow states and also to edit the state to see its effect
on a subsequent run with \-\-dostep or \-\-start.
### Running [ISCE stack processors](./contrib/stack/README.md)
### Notes on Digital Elevation Models
- ISCE will automatically download SRTM Digital Elevation Models when you run an

View File

@ -3,6 +3,7 @@
import os
import argparse
import glob
import isce
import isceobj
from isceobj.Util.ImageUtil import ImageLib as IML
@ -14,7 +15,7 @@ def cmdLineParse():
'''
parser = argparse.ArgumentParser(description='Fixes pathnames in ISCE image XML files. Can be used to do more things in the future.')
parser.add_argument('-i', '--input', type=str, required=True, dest='infile',
parser.add_argument('-i', '--input', type=str, nargs='+', required=True, dest='infile',
help = 'Input image for which the XML file needs to be fixed.')
fname = parser.add_mutually_exclusive_group(required=True)
@ -33,20 +34,19 @@ if __name__ == '__main__':
'''
inps = cmdLineParse()
if inps.infile.endswith('.xml'):
inps.infile = os.path.splitext(inps.infile)[0]
for fname in inps.infile:
if fname.endswith('.xml'):
fname = os.path.splitext(fname)[0]
print('fixing xml file path for file: {}'.format(fname))
dirname = os.path.dirname(inps.infile)
img = IML.loadImage(inps.infile)[0]
if inps.full:
fname = os.path.abspath( os.path.join(dirname, os.path.basename(inps.infile)))
else:
fname = os.path.basename( os.path.basename(inps.infile))
img.filename = fname
img.setAccessMode('READ')
img.renderHdr()
if inps.full:
fdir = os.path.dirname(fname)
fname = os.path.abspath(os.path.join(fdir, os.path.basename(fname)))
else:
fname = os.path.basename(os.path.basename(fname))
img = IML.loadImage(fname)[0]
img.filename = fname
img.setAccessMode('READ')
img.renderHdr()
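For reference, the path logic the rewritten loop applies to each input can be exercised standalone; a minimal sketch using only os.path (the sample filename is made up):

import os

def resolve(fname, full):
    # strip a trailing .xml so the data file itself is referenced
    if fname.endswith('.xml'):
        fname = os.path.splitext(fname)[0]
    if full:
        # absolute path: the XML keeps working after the directory moves
        fdir = os.path.dirname(fname)
        return os.path.abspath(os.path.join(fdir, os.path.basename(fname)))
    # bare filename: the XML stays portable next to its data file
    return os.path.basename(fname)

print(resolve('merged/SLC/20150405/20150405.slc.xml', full=True))
print(resolve('merged/SLC/20150405/20150405.slc.xml', full=False))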

View File

@ -42,7 +42,9 @@ def createParser():
parser.add_argument('-s', '--secondary',type=str, dest='secondary', required=True,
help='Secondary image')
parser.add_argument('--fix-xml','--fix-image-xml', dest='fixImageXml', action='store_true',
help='Fix the image file path in the XML file. Enable this if input files havee been moved.')
help='Fix the image file path in the XML file. Enable this if input files have been moved.')
parser.add_argument('--fix-vrt','--fix-image-vrt', dest='fixImageVrt', action='store_true',
help='Fix the image file path in the VRT file. Enable this if input files have VRT files pointing to non-existent burst files')
parser.add_argument('--op','--outprefix','--output-prefix', type=str, dest='outprefix',
default='offset', required=True,
@ -166,6 +168,12 @@ def estimateOffsetField(reference, secondary, inps=None):
img.setAccessMode('READ')
img.renderHdr()
if inps.fixImageVrt:
for fname in [reference, secondary]:
fname = os.path.abspath(fname)
img = IML.loadImage(fname)[0]
img.renderVRT()
###Loading the secondary image object
sim = isceobj.createSlcImage()
sim.load(secondary+'.xml')
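The new --fix-vrt option regenerates VRT files whose source paths have gone stale. One hedged way to check for stale references first (assuming the osgeo GDAL Python bindings are installed; the VRT name is a placeholder):

import os
from osgeo import gdal

vrt = 'reference.slc.vrt'  # hypothetical file name
ds = gdal.Open(vrt, gdal.GA_ReadOnly)
if ds is not None:
    # GetFileList() returns the VRT itself plus every file it references
    for f in ds.GetFileList():
        if not os.path.exists(f):
            print('missing source:', f)
ds = None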

View File

@ -29,19 +29,19 @@
import os
import sys
import time
import argparse
import shelve
import isce
import sys
import isceobj
from contrib.Snaphu.Snaphu import Snaphu
from isceobj.Constants import SPEED_OF_LIGHT
import argparse
import os
import pickle
import sys
import shelve
from contrib.Snaphu.Snaphu import Snaphu
#from contrib.UnwrapComp.unwrapComponents import UnwrapComponents
def createParser():
'''
Create command line parser.
@ -89,9 +89,6 @@ def extractInfoFromPickle(pckfile, inps):
from isceobj.Planet.Planet import Planet
from isceobj.Util.geo.ellipsoid import Ellipsoid
# with open(pckfile, 'rb') as f:
# frame = pickle.load(f)
with shelve.open(pckfile,flag='r') as db:
# frame = db['swath']
burst = db['frame']
@ -292,6 +289,7 @@ def main(iargs=None):
'''
The main driver.
'''
start_time = time.time()
inps = cmdLineParse(iargs)
print(inps.method)
@ -338,6 +336,10 @@ def main(iargs=None):
elif inps.method == 'icu':
runUnwrapIcu(inps.intfile, inps.unwprefix + '_icu.unw')
# time usage
m, s = divmod(time.time() - start_time, 60)
print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
if __name__ == '__main__':

View File

@ -192,10 +192,10 @@ if __name__ == '__main__':
else:
dateZero = dates[0]
print('all pairs:\n{}'.format(' '.join(pairsAll)))
print('all dates:\n{}'.format(' '.join(datesAll)))
print('used pairs:\n{}'.format(' '.join(pairs)))
print('used dates:\n{}'.format(' '.join(dates)))
print(f'all pairs ({len(pairsAll)}):\n{pairsAll}')
print(f'all dates ({len(datesAll)}):\n{datesAll}')
print(f'used pairs ({len(pairs)}):\n{pairs}')
print(f'used dates ({len(dates)}):\n{dates}')
####################################################################################

View File

@ -11,6 +11,7 @@ import datetime
import logging
import argparse
import numpy as np
from osgeo import gdal
import isce
import isceobj
@ -283,51 +284,43 @@ def multilook(infile, outname=None, alks=5, rlks=15, multilook_tool="isce", no_d
Take looks.
'''
# default output filename
if outname is None:
spl = os.path.splitext(infile)
ext = '.{0}alks_{1}rlks'.format(alks, rlks)
outname = spl[0] + ext + spl[1]
if multilook_tool=="gdal":
# remove existing *.hdr files, to avoid the following gdal error:
# ERROR 1: Input and output dataset sizes or band counts do not match in GDALDatasetCopyWholeRaster()
fbase = os.path.splitext(outname)[0]
print(f'remove {fbase}*.hdr')
for fname in glob.glob(f'{fbase}*.hdr'):
os.remove(fname)
from osgeo import gdal
print("multi looking using gdal ...")
if outname is None:
spl = os.path.splitext(infile)
ext = '.{0}alks_{1}rlks'.format(alks, rlks)
outname = spl[0] + ext + spl[1]
print(infile)
ds = gdal.Open(infile + ".vrt", gdal.GA_ReadOnly)
print(f"multilooking {rlks} x {alks} using gdal for {infile} ...")
ds = gdal.Open(infile+'.vrt', gdal.GA_ReadOnly)
xSize = ds.RasterXSize
ySize = ds.RasterYSize
outXSize = int(xSize / int(rlks))
outYSize = int(ySize / int(alks))
srcXSize = outXSize * int(rlks)
srcYSize = outYSize * int(alks)
outXSize = xSize/int(rlks)
outYSize = ySize/int(alks)
if no_data:
gdalTranslateOpts = gdal.TranslateOptions(format="ENVI", width=outXSize, height=outYSize, noData=no_data)
else:
gdalTranslateOpts = gdal.TranslateOptions(format="ENVI", width=outXSize, height=outYSize)
gdal.Translate(outname, ds, options=gdalTranslateOpts)
ds = None
ds = gdal.Open(outname, gdal.GA_ReadOnly)
gdal.Translate(outname+".vrt", ds, options=gdal.TranslateOptions(format="VRT"))
ds = None
options_str = f'-of ENVI -outsize {outXSize} {outYSize} -srcwin 0 0 {srcXSize} {srcYSize} '
options_str += f'-a_nodata {no_data}' if no_data else ''
gdal.Translate(outname, ds, options=options_str)
# generate VRT file
gdal.Translate(outname+".vrt", outname, options='-of VRT')
else:
from mroipac.looks.Looks import Looks
print('Multilooking {0} ...'.format(infile))
print(f'multilooking {rlks} x {alks} using isce2 for {infile} ...')
inimg = isceobj.createImage()
inimg.load(infile + '.xml')
if outname is None:
spl = os.path.splitext(inimg.filename)
ext = '.{0}alks_{1}rlks'.format(alks, rlks)
outname = spl[0] + ext + spl[1]
lkObj = Looks()
lkObj.setDownLooks(alks)
lkObj.setAcrossLooks(rlks)
@ -338,6 +331,20 @@ def multilook(infile, outname=None, alks=5, rlks=15, multilook_tool="isce", no_d
return outname
def progress_cb(complete, message, cb_data):
'''Emit progress report in numbers for 10% intervals and dots for 3%
Link: https://stackoverflow.com/questions/68025043/adding-a-progress-bar-to-gdal-translate
'''
if int(complete*100) % 10 == 0:
msg = f'{complete*100:.0f}'
print(msg, end='', flush=True)
if msg == '100':
print(' ')
elif int(complete*100) % 3 == 0:
print(f'{cb_data}', end='', flush=True)
return
def main(iargs=None):
'''
@ -408,11 +415,12 @@ def main(iargs=None):
mergeBurstsVirtual(frames, referenceFrames, fileList, inps.outfile+suffix, validOnly=inps.validOnly)
if (not virtual):
print('writing merged file to disk ...')
cmd = 'gdal_translate -of ENVI -co INTERLEAVE=BIL ' + inps.outfile + suffix + '.vrt ' + inps.outfile + suffix
os.system(cmd)
print('writing merged file to disk via gdal.Translate ...')
gdal.Translate(inps.outfile+suffix, inps.outfile+suffix+'.vrt',
options='-of ENVI -co INTERLEAVE=BIL',
callback=progress_cb,
callback_data='.')
print(inps.multilook)
if inps.multilook:
multilook(inps.outfile+suffix,
outname=inps.outfile,
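The gdal branch above sizes the output with -srcwin so each output pixel covers an exact rlks x alks window of input pixels; a small sketch of that arithmetic with made-up dimensions:

xSize, ySize = 1000, 2000    # made-up full-resolution raster size
rlks, alks = 9, 3            # range and azimuth looks
outXSize = int(xSize / rlks)     # 111 output columns
outYSize = int(ySize / alks)     # 666 output rows
srcXSize = outXSize * rlks       # 999: the 1-column remainder is cropped
srcYSize = outYSize * alks       # 1998: the 2-row remainder is cropped
print(outXSize, outYSize, srcXSize, srcYSize)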

View File

@ -17,7 +17,7 @@ from isceobj.Sensor.TOPS.Sentinel1 import Sentinel1
from Stack import config, run, sentinelSLC
helpstr= '''
helpstr = """
Stack processor for Sentinel-1 data using ISCE software.
@ -73,7 +73,7 @@ stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem
For all workflows, coregistration can be done using only geometry or with geometry plus refined azimuth offsets through NESD approach.
Existing workflows: slc, interferogram, correlation, offset
'''
"""
class customArgparseAction(argparse.Action):
def __call__(self, parser, args, values, option_string=None):
@ -85,7 +85,7 @@ class customArgparseAction(argparse.Action):
def createParser():
parser = argparse.ArgumentParser( description='Preparing the directory structure and config files for stack processing of Sentinel data')
parser = argparse.ArgumentParser(description='Preparing the directory structure and config files for stack processing of Sentinel data')
parser.add_argument('-H','--hh', nargs=0, action=customArgparseAction,
help='Display detailed help information.')
@ -117,37 +117,33 @@ def createParser():
parser.add_argument('-b', '--bbox', dest='bbox', type=str, default=None,
help="Lat/Lon Bounding SNWE. -- Example : '19 20 -99.5 -98.5' -- Default : common overlap between stack")
parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='',
help="text command to be added to the beginning of each line of the run files (default: '%(default)s'). "
"Example : 'source ~/.bash_profile;'")
parser.add_argument('-x', '--exclude_dates', dest='exclude_dates', type=str, default=None,
parser.add_argument('-x', '--exclude_dates', dest='exclude_dates', type=str, default=None,
help="List of the dates to be excluded for processing. -- Example : '20141007,20141031' (default: %(default)s).")
parser.add_argument('-i', '--include_dates', dest='include_dates', type=str, default=None,
parser.add_argument('-i', '--include_dates', dest='include_dates', type=str, default=None,
help="List of the dates to be included for processing. -- Example : '20141007,20141031' (default: %(default)s).")
parser.add_argument('--start_date', dest='startDate', type=str, default=None,
parser.add_argument('--start_date', dest='startDate', type=str, default=None,
help='Start date for stack processing. Acquisitions before start date are ignored. '
'format should be YYYY-MM-DD e.g., 2015-01-23')
parser.add_argument('--stop_date', dest='stopDate', type=str, default=None,
parser.add_argument('--stop_date', dest='stopDate', type=str, default=None,
help='Stop date for stack processing. Acquisitions after stop date are ignored. '
'format should be YYYY-MM-DD e.g., 2017-02-26')
parser.add_argument('-z', '--azimuth_looks', dest='azimuthLooks', type=str, default='3',
parser.add_argument('-z', '--azimuth_looks', dest='azimuthLooks', type=str, default='3',
help='Number of looks in azimuth for interferogram multi-looking (default: %(default)s).')
parser.add_argument('-r', '--range_looks', dest='rangeLooks', type=str, default='9',
parser.add_argument('-r', '--range_looks', dest='rangeLooks', type=str, default='9',
help='Number of looks in range for interferogram multi-looking (default: %(default)s).')
parser.add_argument('-f', '--filter_strength', dest='filtStrength', type=str, default='0.5',
parser.add_argument('-f', '--filter_strength', dest='filtStrength', type=str, default='0.5',
help='Filter strength for interferogram filtering (default: %(default)s).')
parser.add_argument('--snr_misreg_threshold', dest='snrThreshold', type=str, default='10',
parser.add_argument('--snr_misreg_threshold', dest='snrThreshold', type=str, default='10',
help='SNR threshold for estimating range misregistration using cross correlation (default: %(default)s).')
parser.add_argument('-p', '--polarization', dest='polarization', type=str, default='vv',
parser.add_argument('-p', '--polarization', dest='polarization', type=str, default='vv',
help='SAR data polarization (default: %(default)s).')
parser.add_argument('-C', '--coregistration', dest='coregistration', type=str, default='NESD', choices=['geometry', 'NESD'],
@ -157,39 +153,46 @@ def createParser():
help='number of overlap interferograms between each date and subsequent dates used for NESD computation '
'(for azimuth offsets misregistration) (default: %(default)s).')
parser.add_argument('-e', '--esd_coherence_threshold', dest='esdCoherenceThreshold', type=str, default='0.85',
parser.add_argument('-e', '--esd_coherence_threshold', dest='esdCoherenceThreshold', type=str, default='0.85',
help='Coherence threshold for estimating azimuth misregistration using enhanced spectral diversity (default: %(default)s).')
parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='interferogram',
choices=['slc', 'correlation', 'interferogram', 'offset'],
help='The InSAR processing workflow (default: %(default)s).')
parser.add_argument('-V', '--virtual_merge', dest='virtualMerge', type=str, default=None, choices=['True', 'False'],
help='Use virtual files for the merged SLCs and geometry files.\n'
'Default: True for correlation / interferogram workflow\n'
' False for slc / offset workflow')
parser.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False,
help='Allow App to use GPU when available')
parser.add_argument('--num_proc', '--num_process', dest='numProcess', type=int, default=1,
help='number of tasks running in parallel in each run file (default: %(default)s).')
parser.add_argument('--num_proc4topo', '--num_process4topo', dest='numProcess4topo', type=int, default=1,
help='number of parallel processes (for topo only) (default: %(default)s).')
# unwrap
parser.add_argument('-u', '--unw_method', dest='unwMethod', type=str, default='snaphu', choices=['icu', 'snaphu'],
help='Unwrapping method (default: %(default)s).')
parser.add_argument('-rmFilter', '--rmFilter', dest='rmFilter', action='store_true', default=False,
help='Make an extra unwrap file in which filtering effect is removed')
parser.add_argument('--param_ion', dest='param_ion', type=str, default=None,
# ionospheric correction
parser.add_argument('--param_ion', dest='param_ion', type=str, default=None,
help='ionosphere estimation parameter file. if provided, will do ionosphere estimation.')
parser.add_argument('--num_connections_ion', dest='num_connections_ion', type=str, default = '3',
help='number of interferograms between each date and subsequent dates for ionosphere estimation (default: %(default)s).')
# computing
compute = parser.add_argument_group('Computing options')
compute.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False,
help='Allow App to use GPU when available')
compute.add_argument('--num_proc', '--num_process', dest='numProcess', type=int, default=1,
help='number of tasks running in parallel in each run file (default: %(default)s).')
compute.add_argument('--num_proc4topo', '--num_process4topo', dest='numProcess4topo', type=int, default=1,
help='number of parallel processes (for topo only) (default: %(default)s).')
compute.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='',
help="text command to be added to the beginning of each line of the run files (default: '%(default)s'). "
"Example : 'source ~/.bash_profile;'")
compute.add_argument('-V', '--virtual_merge', dest='virtualMerge', type=str, default=None, choices=['True', 'False'],
help='Use virtual files for the merged SLCs and geometry files.\n'
'Default: True for correlation / interferogram workflow\n'
' False for slc / offset workflow')
return parser
@ -213,7 +216,7 @@ def cmdLineParse(iargs = None):
def generate_geopolygon(bbox):
"""generate shapely Polygon"""
from shapely.geometry import Point, Polygon
# convert pnts to shapely polygon format
# the order of pnts is counter-clockwise, starting from the lower left corner
# the order for Point is lon,lat
@ -221,6 +224,7 @@ def generate_geopolygon(bbox):
return Polygon([(p.coords.xy[0][0], p.coords.xy[1][0]) for p in points])
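A self-contained sketch of the polygon generate_geopolygon builds from an SNWE bounding box (assuming shapely is available; the coordinates are taken from the --bbox example above):

from shapely.geometry import Polygon

S, N, W, E = 19.0, 20.0, -99.5, -98.5   # SNWE order, as in --bbox
# counter-clockwise from the lower-left corner, (lon, lat) per point
poly = Polygon([(W, S), (E, S), (E, N), (W, N)])
print(poly.bounds)   # (-99.5, 19.0, -98.5, 20.0)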
####################################
def get_dates(inps):
# Given the SLC directory This function extracts the acquisition dates
@ -230,6 +234,7 @@ def get_dates(inps):
if inps.bbox is not None:
bbox = [float(val) for val in inps.bbox.split()]
bbox_poly = np.array([[bbox[2],bbox[0]],[bbox[3],bbox[0]],[bbox[3],bbox[1]],[bbox[2],bbox[1]]])
if inps.exclude_dates is not None:
excludeList = inps.exclude_dates.split(',')
@ -248,9 +253,9 @@ def get_dates(inps):
SAFE_files.append(str.replace(line,'\n','').strip())
else:
SAFE_files = glob.glob(os.path.join(inps.slc_dirname,'S1*_IW_SLC*zip')) # changed to zip file by Minyan Zhong
SAFE_files = sorted(glob.glob(os.path.join(inps.slc_dirname, 'S1*_IW_SLC*zip'))) # changed to zip file by Minyan Zhong
if SAFE_files == []:
SAFE_files = glob.glob(os.path.join(inps.slc_dirname,'S1*_IW_SLC*SAFE'))
SAFE_files = sorted(glob.glob(os.path.join(inps.slc_dirname, 'S1*_IW_SLC*SAFE')))
if len(SAFE_files) == 0:
raise Exception('No SAFE file found')
@ -278,7 +283,6 @@ def get_dates(inps):
f = open('SAFE_files.txt','w')
safe_count=0
safe_dict={}
bbox_poly = np.array([[bbox[2],bbox[0]],[bbox[3],bbox[0]],[bbox[3],bbox[1]],[bbox[2],bbox[1]]])
for safe in SAFE_files:
safeObj=sentinelSLC(safe)
@ -429,19 +433,32 @@ def get_dates(inps):
return dateList, inps.reference_date, secondaryList, safe_dict
def selectNeighborPairs(dateList, num_connections, updateStack=False): # should be changed to able to account for the existed aquisitions -- Minyan Zhong
def selectNeighborPairs(dateList, stackReferenceDate, secondaryDates, num_connections, updateStack=False):
"""Select nearest neighbor acquisitions to form seqential pairs."""
pairs = []
if updateStack:
# use the secondaryDates (new acquisitions), instead of the entire list of dates
print('\nUpdating an existing stack ...\n')
# include the reference date for pairing if it is among the most recent acquisitions
dateList = sorted(secondaryDates + [stackReferenceDate])[1:]
num_date = len(dateList)
# translate num_connections input
if num_connections == 'all':
num_connections = len(dateList) - 1
else:
num_connections = int(num_connections)
# selecting nearest pairs based on dateList and num_connections
num_connections = num_connections + 1
for i in range(len(dateList)-1):
for j in range(i+1,i + num_connections):
if j<len(dateList):
pairs.append((dateList[i],dateList[j]))
for i in range(num_date-1):
for j in range(i+1, i+num_connections):
if j < num_date:
pairs.append((dateList[i], dateList[j]))
print('selecting pairs with {} nearest neighbor connections: {}'.format(num_connections-1, len(pairs)))
return pairs
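To make the selection rule concrete, here is a minimal sketch of the nearest-neighbor pairing with made-up dates and num_connections=2 (before the +1 applied inside the function):

dateList = ['20141007', '20141031', '20141124', '20141218']
num_connections = 2 + 1   # the function adds 1 internally
pairs = []
for i in range(len(dateList) - 1):
    for j in range(i + 1, i + num_connections):
        if j < len(dateList):
            pairs.append((dateList[i], dateList[j]))
print(pairs)
# [('20141007', '20141031'), ('20141007', '20141124'),
#  ('20141031', '20141124'), ('20141031', '20141218'),
#  ('20141124', '20141218')]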
@ -474,7 +491,7 @@ def selectNeighborPairsIonosphere(safe_dict, num_connections):
if starting_ranges[i] not in starting_ranges_unique:
starting_ranges_unique.append(starting_ranges[i])
ndate_unique = len(starting_ranges_unique)
#put dates of same starting ranges in a list
#result is a 2-D list, each D is sorted by date
starting_ranges_unique_dates = [[] for i in range(ndate_unique)]
@ -850,12 +867,9 @@ def checkCurrentStatus(inps):
coregSLC.sort()
if len(coregSLC)>0:
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('')
print('An existing stack with following coregistered SLCs was found:')
print('\nAn existing stack with the following coregistered SLCs was found:')
print(coregSLC)
print('')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
else:
pass
@ -863,11 +877,9 @@ def checkCurrentStatus(inps):
newAcquisitions.sort()
if len(newAcquisitions)>0:
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('')
print('New acquisitions was found: ')
print('\nNew acquisitions were found: ')
print(newAcquisitions)
print('')
print('%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
print('\n%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%')
else:
print(' ********************************* ')
print(' ***************** ')
@ -883,7 +895,6 @@ def checkCurrentStatus(inps):
if inps.coregistration in ['NESD','nesd']:
numSLCReprocess = 2*int(inps.num_overlap_connections)
if numSLCReprocess > len(secondaryDates):
numSLCReprocess = len(secondaryDates)
@ -894,7 +905,6 @@ def checkCurrentStatus(inps):
raise Exception('The original SAFE files for the latest {0} coregistered SLCs are needed'.format(numSLCReprocess))
else: # add by Minyan Zhong, should be changed later as numSLCReprocess should be 0
numSLCReprocess = int(inps.num_connections)
if numSLCReprocess > len(secondaryDates):
numSLCReprocess = len(secondaryDates)
@ -905,7 +915,6 @@ def checkCurrentStatus(inps):
raise Exception('The original SAFE files for the latest {0} coregistered SLCs are needed'.format(numSLCReprocess))
print ('Last {0} coregistered SLCs to be updated: '.format(numSLCReprocess), latestCoregSLCs)
secondaryDates = latestCoregSLCs + newAcquisitions
secondaryDates.sort()
@ -941,6 +950,7 @@ def checkCurrentStatus(inps):
return acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, stackUpdate
def main(iargs=None):
inps = cmdLineParse(iargs)
@ -955,29 +965,11 @@ def main(iargs=None):
print('**************************')
sys.exit(1)
if inps.workflow not in ['interferogram', 'offset', 'correlation', 'slc']:
print('')
print('**************************')
print('Error: workflow ', inps.workflow, ' is not valid.')
print('Please choose one of these workflows: interferogram, offset, correlation, slc')
print('Use argument "-W" or "--workflow" to choose a specific workflow.')
print('**************************')
print('')
sys.exit(1)
acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack = checkCurrentStatus(inps)
if updateStack:
print('')
print('Updating an existing stack ...')
print('')
dates4NewPairs = sorted(secondaryDates + [stackReferenceDate])[1:]
pairs = selectNeighborPairs(dates4NewPairs, inps.num_connections,updateStack) # will be change later
else:
pairs = selectNeighborPairs(acquisitionDates, inps.num_connections,updateStack)
# selecting pairs for interferograms / correlation / offset workflows
if inps.workflow != 'slc':
pairs = selectNeighborPairs(acquisitionDates, stackReferenceDate, secondaryDates, inps.num_connections, updateStack)
print ('*****************************************')
print ('Coregistration method: ', inps.coregistration )
@ -1007,6 +999,5 @@ def main(iargs=None):
if __name__ == "__main__":
# Main engine
main(sys.argv[1:])

View File

@ -30,19 +30,20 @@
# giangi: taken Piyush code for snaphu and adapted
import isce
import sys
import isceobj
from contrib.Snaphu.Snaphu import Snaphu
from isceobj.Constants import SPEED_OF_LIGHT
import argparse
import os
import pickle
from osgeo import gdal
import sys
import time
import argparse
import numpy as np
#import shelve
import s1a_isce_utils as ut
from osgeo import gdal
import isce
import isceobj
from isceobj.Constants import SPEED_OF_LIGHT
from isceobj.Util.ImageUtil import ImageLib as IML
from contrib.Snaphu.Snaphu import Snaphu
import s1a_isce_utils as ut
def createParser():
'''
@ -94,12 +95,6 @@ def extractInfo(xmlName, inps):
from isceobj.Planet.Planet import Planet
from isceobj.Util.geo.ellipsoid import Ellipsoid
# with open(pckfile, 'rb') as f:
# frame = pickle.load(f)
#with shelve.open(pckfile,flag='r') as db:
# frame = db['swath']
frame = ut.loadProduct(xmlName)
burst = frame.bursts[0]
@ -216,7 +211,7 @@ def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = Non
outImage.setWidth(width)
outImage.setLength(length)
outImage.setAccessMode('read')
# outImage.createImage()
# outImage.createImage()
outImage.renderHdr()
outImage.renderVRT()
#outImage.finalizeImage()
@ -230,10 +225,10 @@ def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = Non
connImage.setLength(length)
connImage.setAccessMode('read')
connImage.setDataType('BYTE')
# connImage.createImage()
# connImage.createImage()
connImage.renderHdr()
connImage.renderVRT()
# connImage.finalizeImage()
# connImage.finalizeImage()
return
@ -246,15 +241,9 @@ def runUnwrapMcf(infile, outfile, corfile, config, defomax=2):
def runUnwrapIcu(infile, outfile):
from mroipac.icu.Icu import Icu
#Setup images
#ampImage
# ampImage = obj.insar.resampAmpImage.copy(access_mode='read')
# width = self.ampImage.getWidth()
img = isceobj.createImage()
img.load(infile + '.xml')
width = img.getWidth()
width = img.getWidth()
#intImage
intImage = isceobj.createIntImage()
@ -324,34 +313,33 @@ def main(iargs=None):
'''
The main driver.
'''
start_time = time.time()
inps = cmdLineParse(iargs)
print ('unwrapping method : ' , inps.method)
if inps.method == 'snaphu':
if inps.nomcf:
fncall = runUnwrap
else:
fncall = runUnwrapMcf
swathList = ut.getSwathList(inps.reference)
#metadata = extractInfo(inps.reference+'.xml', inps)
xmlFile = os.path.join(inps.reference , 'IW{0}.xml'.format(swathList[0]))
metadata = extractInfo(xmlFile, inps)
fncall(inps.intfile, inps.unwfile, inps.cohfile, metadata, defomax=inps.defomax)
if inps.nomcf:
fncall = runUnwrap
else:
fncall = runUnwrapMcf
swathList = ut.getSwathList(inps.reference)
xmlFile = os.path.join(inps.reference , 'IW{0}.xml'.format(swathList[0]))
metadata = extractInfo(xmlFile, inps)
fncall(inps.intfile, inps.unwfile, inps.cohfile, metadata, defomax=inps.defomax)
#mask out wired values from snaphu
intImage = isceobj.createImage()
intImage.load(inps.intfile+'.xml')
width = intImage.width
length = intImage.length
#mask out weird values from snaphu
intImage = isceobj.createImage()
intImage.load(inps.intfile+'.xml')
width = intImage.width
length = intImage.length
flag = np.fromfile(inps.intfile, dtype=np.complex64).reshape(length, width)
unw=np.memmap(inps.unwfile, dtype='float32', mode='r+', shape=(length*2, width))
(unw[0:length*2:2, :])[np.nonzero(flag==0)]=0
(unw[1:length*2:2, :])[np.nonzero(flag==0)]=0
flag = np.fromfile(inps.intfile, dtype=np.complex64).reshape(length, width)
unw=np.memmap(inps.unwfile, dtype='float32', mode='r+', shape=(length*2, width))
(unw[0:length*2:2, :])[np.nonzero(flag==0)]=0
(unw[1:length*2:2, :])[np.nonzero(flag==0)]=0
elif inps.method == 'icu':
runUnwrapIcu(inps.intfile, inps.unwfile)
runUnwrapIcu(inps.intfile, inps.unwfile)
if inps.rmfilter:
filtfile = os.path.abspath(inps.intfile)
@ -360,6 +348,10 @@ def main(iargs=None):
remove_filter(intfile, filtfile, inps.unwfile)
# time usage
m, s = divmod(time.time() - start_time, 60)
print('time used: {:02.0f} mins {:02.1f} secs.'.format(m, s))
if __name__ == '__main__':
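The masking block above treats the 2-band BIL unwrapped file as alternating amplitude and phase rows; the same indexing can be checked on small synthetic arrays (no ISCE or memmap needed):

import numpy as np

length, width = 4, 6                      # made-up raster dimensions
flag = np.ones((length, width), dtype=np.complex64)
flag[1, 2] = 0                            # pretend one zero input pixel
# BIL layout: row 0 amplitude, row 1 phase, row 2 amplitude, ...
unw = np.ones((length * 2, width), dtype=np.float32)
unw[0:length*2:2, :][np.nonzero(flag == 0)] = 0   # amplitude band
unw[1:length*2:2, :][np.nonzero(flag == 0)] = 0   # phase band
print(unw[2, 2], unw[3, 2])               # both zeroed for pixel (1, 2)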

View File

@ -207,8 +207,8 @@ f*_*: folders containing the frame processing results.
insar: users should be mostly interested in this folder. For the explanations of the files, please refer
to the xml files in folder "explanations". You can find the differential interferograms without and with
ionospheric correction if you have chosen to do ionospheric correction. For example,
diff_150405-150503_5rlks_28alks.int: interferoram without ionospheric correction
diff_150405-150503_5rlks_28alks_ori.int: interferogram with ionospheric correction
diff_150405-150503_5rlks_28alks.int: interferogram with ionospheric correction
diff_150405-150503_5rlks_28alks_ori.int: original interferogram without ionospheric correction
ion: folder for computing ionospheric phase. subband interferograms are created in folders "lower" and
"upper", and final computations are done in "ion_cal".

View File

@ -156,8 +156,8 @@ By default, ESD is always performed.
<!-- <<PHASE UNWRAPPING>>
Phase unwrapping can be optionally turned off.
Following unwrappers are currently available:
grass, icu, snaphu, snaphu_mcf-->
<!--<property name="unwrap">True</property>-->
grass, icu, snaphu, snaphu_mcf, downsample_snaphu-->
<!--<property name="do unwrap">True</property>-->
<!--<property name="unwrapper name">snaphu_mcf</property>-->
<!-- 2 Stage unwrapping option requires the following tags:-->
<!--<property name="do unwrap">True</property> -->