Merge pull request #323 from yunjunz/tops_stack

stackSentinel: expose virtual merge via -V / --virtual_merge option
LT1AB
Ryan Burns 2021-08-16 15:59:37 -07:00 committed by GitHub
commit d63c40a30d
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
4 changed files with 100 additions and 88 deletions

View File

@ -37,7 +37,7 @@ The following calibration auxiliary (AUX_CAL) file is used for **antenna pattern
Run the command below to download the AUX_CAL file once and store it somewhere (_i.e._ ~/aux/aux_cal) so that you can use it all the time, for `stackSentinel.py -a` or `auxiliary data directory` in `topsApp.py`.
```
wget https://aux.sentinel1.eo.esa.int/AUX_CAL/2014/09/08/S1A_AUX_CAL_V20140908T000000_G20190626T100201.SAFE/ --no-check-certificate --recursive --level=1 --cut-dirs=4 -nH
wget https://qc.sentinel1.groupcls.com/product/S1A/AUX_CAL/2014/09/08/S1A_AUX_CAL_V20140908T000000_G20190626T100201.SAFE.TGZ
```
#### 1. Create your project folder somewhere ####
@ -53,7 +53,7 @@ Download of DEM (need to use wgs84 version) using the ISCE DEM download script.
```
mkdir DEM; cd DEM
dem.py -a stitch -b 18 20 -100 -97 -r -s 1 c
dem.py -a stitch -b 18 20 -100 -97 -r -s 1 -c
rm demLat*.dem demLat*.dem.xml demLat*.dem.vrt
cd ..
```

View File

@ -5,17 +5,18 @@
# Heresh Fattahi, updated for stack processing
import numpy as np
import os
import isce
import isceobj
import glob
import datetime
import logging
import argparse
import numpy as np
import isce
import isceobj
from isceobj.Util.ImageUtil import ImageLib as IML
from isceobj.Util.decorators import use_api
import s1a_isce_utils as ut
import glob
def createParser():
@ -25,46 +26,45 @@ def createParser():
parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths')
parser.add_argument('-i', '--inp_reference', type=str, dest='reference', required=True,
help='Directory with the reference image')
help='Directory with the reference image')
parser.add_argument('-s', '--stack', type=str, dest='stack', default = None,
help='Directory with the stack xml files which includes the common valid region of the stack')
help='Directory with the stack xml files which includes the common valid region of the stack')
parser.add_argument('-d', '--dirname', type=str, dest='dirname', required=True,
help='directory with products to merge')
help='directory with products to merge')
parser.add_argument('-o', '--outfile', type=str, dest='outfile', required=True,
help='Output merged file')
help='Output merged file')
parser.add_argument('-m', '--method', type=str, dest='method', default='avg',
help = 'Method: top / bot/ avg')
help='Method: top / bot/ avg')
parser.add_argument('-a', '--aligned', action='store_true', dest='isaligned',
default=False, help='Use reference information instead of coreg for merged grid.')
parser.add_argument('-a', '--aligned', action='store_true', dest='isaligned', default=False,
help='Use reference information instead of coreg for merged grid.')
parser.add_argument('-l', '--multilook', action='store_true', dest='multilook', default=False,
help = 'Multilook the merged products. True or False')
help='Multilook the merged products. True or False')
parser.add_argument('-A', '--azimuth_looks', type=str, dest='numberAzimuthLooks', default=3,
help = 'azimuth looks')
parser.add_argument('-A', '--azimuth_looks', type=str, dest='numberAzimuthLooks', default=3, help='azimuth looks')
parser.add_argument('-R', '--range_looks', type=str, dest='numberRangeLooks', default=9,
help = 'range looks')
parser.add_argument('-R', '--range_looks', type=str, dest='numberRangeLooks', default=9, help='range looks')
parser.add_argument('-n', '--name_pattern', type=str, dest='namePattern', default='fine*int',
help = 'a name pattern of burst products that will be merged. default: fine. it can be lat, lon, los, burst, hgt, shadowMask, incLocal')
help='a name pattern of burst products that will be merged. '
'default: fine. it can be lat, lon, los, burst, hgt, shadowMask, incLocal')
parser.add_argument('-v', '--valid_only', action='store_true', dest='validOnly', default=False,
help = 'True for SLC, int and coherence. False for geometry files (lat, lon, los, hgt, shadowMask, incLocal).')
help='True for SLC, int and coherence. False for geometry files (lat, lon, los, hgt, shadowMask, incLocal).')
parser.add_argument('-u', '--use_virtual_files', action='store_true', dest='useVirtualFiles', default=False,
help = 'writing only a vrt of merged file. Default: True.')
help='writing only a vrt of merged file. Default: True.')
parser.add_argument('-M', '--multilook_tool', type=str, dest='multilookTool', default='isce',
help = 'The tool used for multi-looking')
help='The tool used for multi-looking')
parser.add_argument('-N', '--no_data_value', type=float, dest='noData', default=None,
help = 'no data value when gdal is used for multi-looking')
help='no data value when gdal is used for multi-looking')
return parser
@ -102,7 +102,7 @@ def mergeBurstsVirtual(frame, referenceFrame, fileList, outfile, validOnly=True)
rightSwath = max(refSwaths, key = lambda x: x.farRange)
totalWidth = int( np.round((rightSwath.farRange - leftSwath.nearRange)/leftSwath.dr + 1))
totalWidth = int(np.round((rightSwath.farRange - leftSwath.nearRange)/leftSwath.dr + 1))
totalLength = int(np.round((botSwath.sensingStop - topSwath.sensingStart).total_seconds()/topSwath.dt + 1 ))
@ -194,7 +194,7 @@ def mergeBursts(frame, fileList, outfile,
linecount = start
outMap = IML.memmap(outfile, mode='write', nchannels=bands,
nxx=width, nyy=nLines, scheme=scheme, dataType=npType)
nxx=width, nyy=nLines, scheme=scheme, dataType=npType)
for index in range(frame.numberOfBursts):
curBurst = frame.bursts[index]
@ -339,9 +339,6 @@ def multilook(infile, outname=None, alks=5, rlks=15, multilook_tool="isce", no_d
#def runMergeBursts(self):
def main(iargs=None):
'''
Merge burst products to make it look like stripmap.
@ -349,7 +346,7 @@ def main(iargs=None):
'''
inps=cmdLineParse(iargs)
virtual = inps.useVirtualFiles
swathList = ut.getSwathList(inps.reference)
referenceFrames = []
frames=[]
@ -378,20 +375,21 @@ def main(iargs=None):
if minBurst==maxBurst:
print('Skipping processing of swath {0}'.format(swath))
continue
if inps.stack:
minStack = stack.bursts[0].burstNumber
print('Updating the valid region of each burst to the common valid region of the stack')
for ii in range(minBurst, maxBurst + 1):
ifg.bursts[ii-minBurst].firstValidLine = stack.bursts[ii-minStack].firstValidLine
ifg.bursts[ii-minBurst].firstValidLine = stack.bursts[ii-minStack].firstValidLine
ifg.bursts[ii-minBurst].firstValidSample = stack.bursts[ii-minStack].firstValidSample
ifg.bursts[ii-minBurst].numValidLines = stack.bursts[ii-minStack].numValidLines
ifg.bursts[ii-minBurst].numValidSamples = stack.bursts[ii-minStack].numValidSamples
ifg.bursts[ii-minBurst].numValidLines = stack.bursts[ii-minStack].numValidLines
ifg.bursts[ii-minBurst].numValidSamples = stack.bursts[ii-minStack].numValidSamples
frames.append(ifg)
referenceFrames.append(reference)
print('bursts: ', minBurst, maxBurst)
fileList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0] + '_%02d.%s'%(x,namePattern[1])) for x in range(minBurst, maxBurst+1)])
fileList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0] + '_%02d.%s'%(x,namePattern[1]))
for x in range(minBurst, maxBurst+1)])
mergedir = os.path.dirname(inps.outfile)
os.makedirs(mergedir, exist_ok=True)
@ -399,10 +397,7 @@ def main(iargs=None):
suffix = '.full'
if (inps.numberRangeLooks == 1) and (inps.numberAzimuthLooks==1):
suffix=''
####Virtual flag is ignored for multi-swath data
####Virtual flag is ignored for multi-swath data
if (not virtual):
print('User requested for multi-swath stitching.')
print('Virtual files are the only option for this.')
@ -417,10 +412,12 @@ def main(iargs=None):
print(inps.multilook)
if inps.multilook:
multilook(inps.outfile+suffix, outname = inps.outfile,
alks = inps.numberAzimuthLooks, rlks=inps.numberRangeLooks,
multilook_tool=inps.multilookTool, no_data=inps.noData)
multilook(inps.outfile+suffix,
outname=inps.outfile,
alks=inps.numberAzimuthLooks,
rlks=inps.numberRangeLooks,
multilook_tool=inps.multilookTool,
no_data=inps.noData)
else:
print('Skipping multi-looking ....')
@ -428,5 +425,4 @@ if __name__ == '__main__' :
'''
Merge products burst-by-burst.
'''
main()

View File

@ -5,11 +5,11 @@ import os
#from isceobj.Sensor.TOPS.coregSwathSLCProduct import coregSwathSLCProduct
class catalog(object):
def __init__(self):
pass
def __init__(self):
pass
def addItem(self,*args):
print(' '.join([str(x) for x in args]))
def addItem(self,*args):
print(' '.join([str(x) for x in args]))
@ -29,22 +29,23 @@ def loadProduct(xmlname):
def saveProduct( obj, xmlname):
'''
Save the product to an XML file using Product Manager.
'''
# import shelve
# import os
# with shelve.open(os.path.dirname(xmlname) + '/'+ os.path.basename(xmlname) +'.data') as db:
# db['data'] = obj
'''
Save the product to an XML file using Product Manager.
'''
import shelve
import os
with shelve.open(os.path.dirname(xmlname) + '/'+ os.path.basename(xmlname) +'.data') as db:
db['data'] = obj
from iscesys.Component.ProductManager import ProductManager as PM
from iscesys.Component.ProductManager import ProductManager as PM
pm = PM()
pm.configure()
pm = PM()
pm.configure()
pm.dumpProduct(obj, xmlname)
pm.dumpProduct(obj, xmlname)
return None
return None
def getRelativeShifts(mFrame, sFrame, minBurst, maxBurst, secondaryBurstStart):
'''
@ -152,31 +153,32 @@ def adjustValidSampleLine_V2(reference, secondary, minAz=0, maxAz=0, minRng=0, m
if (minAz > 0) and (maxAz > 0):
reference.firstValidLine = secondary.firstValidLine - int(np.floor(maxAz) - 4)
lastValidLine = reference.firstValidLine - 8 + secondary.numValidLines
if lastValidLine < reference.numberOfLines:
reference.numValidLines = secondary.numValidLines - 8
else:
reference.numValidLines = reference.numberOfLines - reference.firstValidLine
reference.firstValidLine = secondary.firstValidLine - int(np.floor(maxAz) - 4)
lastValidLine = reference.firstValidLine - 8 + secondary.numValidLines
if lastValidLine < reference.numberOfLines:
reference.numValidLines = secondary.numValidLines - 8
else:
reference.numValidLines = reference.numberOfLines - reference.firstValidLine
elif (minAz < 0) and (maxAz < 0):
reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4)
lastValidLine = reference.firstValidLine + secondary.numValidLines - 8
if lastValidLine < reference.numberOfLines:
reference.numValidLines = secondary.numValidLines - 8
else:
reference.numValidLines = reference.numberOfLines - reference.firstValidLine
reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4)
lastValidLine = reference.firstValidLine + secondary.numValidLines - 8
if lastValidLine < reference.numberOfLines:
reference.numValidLines = secondary.numValidLines - 8
else:
reference.numValidLines = reference.numberOfLines - reference.firstValidLine
elif (minAz < 0) and (maxAz > 0):
reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4)
lastValidLine = reference.firstValidLine + secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz))
if lastValidLine < reference.numberOfLines:
reference.numValidLines = secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz))
else:
reference.numValidLines = reference.numberOfLines - reference.firstValidLine
reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4)
lastValidLine = reference.firstValidLine + secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz))
if lastValidLine < reference.numberOfLines:
reference.numValidLines = secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz))
else:
reference.numValidLines = reference.numberOfLines - reference.firstValidLine
return reference
def adjustCommonValidRegion(reference,secondary):
# valid lines between reference and secondary
@ -264,13 +266,14 @@ def asBaseClass(inobj):
else:
raise Exception('Cannot be converted to TOPSSwathSLCProduct')
def getSwathList(indir):
swathList = []
for x in [1,2,3]:
SW = os.path.join(indir,'IW{0}'.format(x))
if os.path.exists(SW):
swathList.append(x)
SW = os.path.join(indir,'IW{0}'.format(x))
if os.path.exists(SW):
swathList.append(x)
return swathList

View File

@ -160,9 +160,15 @@ def createParser():
parser.add_argument('-e', '--esd_coherence_threshold', dest='esdCoherenceThreshold', type=str, default='0.85',
help='Coherence threshold for estimating azimuth misregistration using enhanced spectral diversity (default: %(default)s).')
parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='interferogram', choices=['slc', 'correlation', 'interferogram', 'offset'],
parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='interferogram',
choices=['slc', 'correlation', 'interferogram', 'offset'],
help='The InSAR processing workflow (default: %(default)s).')
parser.add_argument('-V', '--virtual_merge', dest='virtualMerge', type=str, default=None, choices=['True', 'False'],
help='Use virtual files for the merged SLCs and geometry files.\n'
'Default: True for correlation / interferogram workflow\n'
' False for slc / offset workflow')
parser.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False,
help='Allow App to use GPU when available')
@ -531,15 +537,16 @@ def slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_di
def correlationStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack):
#############################
i = slcStack(inps, acquisitionDates,stackReferenceDate, secondaryDates, safe_dict, updateStack)
# default value of virtual_merge
virtual_merge = 'True' if not inps.virtualMerge else inps.virtualMerge
i+=1
runObj = run()
runObj.configure(inps, 'run_{:02d}_merge_reference_secondary_slc'.format(i))
runObj.mergeReference(stackReferenceDate, virtual = 'True')
runObj.mergeSecondarySLC(secondaryDates, virtual = 'True')
runObj.mergeReference(stackReferenceDate, virtual = virtual_merge)
runObj.mergeSecondarySLC(secondaryDates, virtual = virtual_merge)
runObj.finalize()
i+=1
@ -559,11 +566,14 @@ def interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDate
i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack)
# default value of virtual_merge
virtual_merge = 'True' if not inps.virtualMerge else inps.virtualMerge
i+=1
runObj = run()
runObj.configure(inps, 'run_{:02d}_merge_reference_secondary_slc'.format(i))
runObj.mergeReference(stackReferenceDate, virtual = 'True')
runObj.mergeSecondarySLC(secondaryDates, virtual = 'True')
runObj.mergeReference(stackReferenceDate, virtual = virtual_merge)
runObj.mergeSecondarySLC(secondaryDates, virtual = virtual_merge)
runObj.finalize()
i+=1
@ -595,11 +605,14 @@ def offsetStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe
i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack)
# default value of virtual_merge
virtual_merge = 'False' if not inps.virtualMerge else inps.virtualMerge
i+=1
runObj = run()
runObj.configure(inps, 'run_{:02d}_merge_reference_secondary_slc'.format(i))
runObj.mergeReference(stackReferenceDate, virtual = 'False')
runObj.mergeSecondarySLC(secondaryDates, virtual = 'False')
runObj.mergeReference(stackReferenceDate, virtual = virtual_merge)
runObj.mergeSecondarySLC(secondaryDates, virtual = virtual_merge)
runObj.finalize()
i+=1