topsStack: mergeBursts & s1a_isce_utils: indentation adjustment
Adjust indentation in the following scripts following PEP8 style for improved readability:
+ topsStack/mergeBursts.py
+ topsStack/s1a_isce_utils.py
parent
44deb3991c
commit
dea56dac56
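The change is purely stylistic. As a rough illustration (not taken verbatim from the diff), the continuation-line convention applied throughout both scripts looks like the following, using one of the options that createParser() in mergeBursts.py defines:

import argparse

parser = argparse.ArgumentParser(description='Generate offset field between two Sentinel swaths')

# Before: spaces around '=' in keyword arguments and an ad-hoc continuation indent, e.g.
#     parser.add_argument('-m', '--method', type=str, dest='method', default='avg',
#             help = 'Method: top / bot/ avg')
#
# After (PEP8): no spaces around '=' for keyword arguments, and the continuation line
# aligned with the opening parenthesis of the call.
parser.add_argument('-m', '--method', type=str, dest='method', default='avg',
                    help='Method: top / bot/ avg')

args = parser.parse_args(['-m', 'top'])  # example invocation; just parses the flag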
topsStack/mergeBursts.py

@@ -5,17 +5,18 @@
 # Heresh Fattahi, updated for stack processing


-import numpy as np
 import os
-import isce
-import isceobj
-import glob
 import datetime
 import logging
 import argparse
+import numpy as np
+
+import isce
+import isceobj
 from isceobj.Util.ImageUtil import ImageLib as IML
 from isceobj.Util.decorators import use_api
 import s1a_isce_utils as ut
+import glob


 def createParser():
@@ -25,46 +26,45 @@ def createParser():
     parser = argparse.ArgumentParser( description='Generate offset field between two Sentinel swaths')
     parser.add_argument('-i', '--inp_reference', type=str, dest='reference', required=True,
-            help='Directory with the reference image')
+                        help='Directory with the reference image')

     parser.add_argument('-s', '--stack', type=str, dest='stack', default = None,
-            help='Directory with the stack xml files which includes the common valid region of the stack')
+                        help='Directory with the stack xml files which includes the common valid region of the stack')

     parser.add_argument('-d', '--dirname', type=str, dest='dirname', required=True,
-            help='directory with products to merge')
+                        help='directory with products to merge')

     parser.add_argument('-o', '--outfile', type=str, dest='outfile', required=True,
-            help='Output merged file')
+                        help='Output merged file')

     parser.add_argument('-m', '--method', type=str, dest='method', default='avg',
-            help = 'Method: top / bot/ avg')
+                        help='Method: top / bot/ avg')

-    parser.add_argument('-a', '--aligned', action='store_true', dest='isaligned',
-            default=False, help='Use reference information instead of coreg for merged grid.')
+    parser.add_argument('-a', '--aligned', action='store_true', dest='isaligned', default=False,
+                        help='Use reference information instead of coreg for merged grid.')

     parser.add_argument('-l', '--multilook', action='store_true', dest='multilook', default=False,
-            help = 'Multilook the merged products. True or False')
+                        help='Multilook the merged products. True or False')

-    parser.add_argument('-A', '--azimuth_looks', type=str, dest='numberAzimuthLooks', default=3,
-            help = 'azimuth looks')
+    parser.add_argument('-A', '--azimuth_looks', type=str, dest='numberAzimuthLooks', default=3, help='azimuth looks')

-    parser.add_argument('-R', '--range_looks', type=str, dest='numberRangeLooks', default=9,
-            help = 'range looks')
+    parser.add_argument('-R', '--range_looks', type=str, dest='numberRangeLooks', default=9, help='range looks')

     parser.add_argument('-n', '--name_pattern', type=str, dest='namePattern', default='fine*int',
-            help = 'a name pattern of burst products that will be merged. default: fine. it can be lat, lon, los, burst, hgt, shadowMask, incLocal')
+                        help='a name pattern of burst products that will be merged. '
+                             'default: fine. it can be lat, lon, los, burst, hgt, shadowMask, incLocal')

     parser.add_argument('-v', '--valid_only', action='store_true', dest='validOnly', default=False,
-            help = 'True for SLC, int and coherence. False for geometry files (lat, lon, los, hgt, shadowMask, incLocal).')
+                        help='True for SLC, int and coherence. False for geometry files (lat, lon, los, hgt, shadowMask, incLocal).')

     parser.add_argument('-u', '--use_virtual_files', action='store_true', dest='useVirtualFiles', default=False,
-            help = 'writing only a vrt of merged file. Default: True.')
+                        help='writing only a vrt of merged file. Default: True.')

     parser.add_argument('-M', '--multilook_tool', type=str, dest='multilookTool', default='isce',
-            help = 'The tool used for multi-looking')
+                        help='The tool used for multi-looking')

     parser.add_argument('-N', '--no_data_value', type=float, dest='noData', default=None,
-            help = 'no data value when gdal is used for multi-looking')
+                        help='no data value when gdal is used for multi-looking')

     return parser

@@ -102,7 +102,7 @@ def mergeBurstsVirtual(frame, referenceFrame, fileList, outfile, validOnly=True)
     rightSwath = max(refSwaths, key = lambda x: x.farRange)


-    totalWidth = int( np.round((rightSwath.farRange - leftSwath.nearRange)/leftSwath.dr + 1))
+    totalWidth = int(np.round((rightSwath.farRange - leftSwath.nearRange)/leftSwath.dr + 1))
     totalLength = int(np.round((botSwath.sensingStop - topSwath.sensingStart).total_seconds()/topSwath.dt + 1 ))

@@ -194,7 +194,7 @@ def mergeBursts(frame, fileList, outfile,
     linecount = start

     outMap = IML.memmap(outfile, mode='write', nchannels=bands,
-                nxx=width, nyy=nLines, scheme=scheme, dataType=npType)
+                        nxx=width, nyy=nLines, scheme=scheme, dataType=npType)

     for index in range(frame.numberOfBursts):
         curBurst = frame.bursts[index]
@@ -339,9 +339,6 @@ def multilook(infile, outname=None, alks=5, rlks=15, multilook_tool="isce", no_d


-
-
-
 #def runMergeBursts(self):
 def main(iargs=None):
     '''
     Merge burst products to make it look like stripmap.
@@ -383,15 +380,16 @@ def main(iargs=None):
             minStack = stack.bursts[0].burstNumber
             print('Updating the valid region of each burst to the common valid region of the stack')
             for ii in range(minBurst, maxBurst + 1):
-                    ifg.bursts[ii-minBurst].firstValidLine = stack.bursts[ii-minStack].firstValidLine
+                ifg.bursts[ii-minBurst].firstValidLine = stack.bursts[ii-minStack].firstValidLine
                 ifg.bursts[ii-minBurst].firstValidSample = stack.bursts[ii-minStack].firstValidSample
-                    ifg.bursts[ii-minBurst].numValidLines = stack.bursts[ii-minStack].numValidLines
-                    ifg.bursts[ii-minBurst].numValidSamples = stack.bursts[ii-minStack].numValidSamples
+                ifg.bursts[ii-minBurst].numValidLines = stack.bursts[ii-minStack].numValidLines
+                ifg.bursts[ii-minBurst].numValidSamples = stack.bursts[ii-minStack].numValidSamples

         frames.append(ifg)
         referenceFrames.append(reference)
         print('bursts: ', minBurst, maxBurst)
-        fileList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0] + '_%02d.%s'%(x,namePattern[1])) for x in range(minBurst, maxBurst+1)])
+        fileList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0] + '_%02d.%s'%(x,namePattern[1]))
+                         for x in range(minBurst, maxBurst+1)])

     mergedir = os.path.dirname(inps.outfile)
     os.makedirs(mergedir, exist_ok=True)
@@ -399,10 +397,7 @@ def main(iargs=None):
     suffix = '.full'
     if (inps.numberRangeLooks == 1) and (inps.numberAzimuthLooks==1):
         suffix=''
-
-
-        ####Virtual flag is ignored for multi-swath data
-
+    ####Virtual flag is ignored for multi-swath data
     if (not virtual):
         print('User requested for multi-swath stitching.')
         print('Virtual files are the only option for this.')
@@ -417,10 +412,12 @@ def main(iargs=None):

     print(inps.multilook)
     if inps.multilook:
-        multilook(inps.outfile+suffix, outname = inps.outfile,
-                alks = inps.numberAzimuthLooks, rlks=inps.numberRangeLooks,
-                multilook_tool=inps.multilookTool, no_data=inps.noData)
-
+        multilook(inps.outfile+suffix,
+                  outname=inps.outfile,
+                  alks=inps.numberAzimuthLooks,
+                  rlks=inps.numberRangeLooks,
+                  multilook_tool=inps.multilookTool,
+                  no_data=inps.noData)
     else:
         print('Skipping multi-looking ....')

@@ -428,5 +425,4 @@ if __name__ == '__main__' :
     '''
     Merge products burst-by-burst.
     '''
-
     main()
topsStack/s1a_isce_utils.py

@@ -5,11 +5,11 @@ import os
 #from isceobj.Sensor.TOPS.coregSwathSLCProduct import coregSwathSLCProduct

 class catalog(object):
-   def __init__(self):
-      pass
+    def __init__(self):
+        pass

-   def addItem(self,*args):
-      print(' '.join([str(x) for x in args]))
+    def addItem(self,*args):
+        print(' '.join([str(x) for x in args]))



@@ -29,22 +29,23 @@ def loadProduct(xmlname):


 def saveProduct( obj, xmlname):
-   '''
-   Save the product to an XML file using Product Manager.
-   '''
-   # import shelve
-   # import os
-   # with shelve.open(os.path.dirname(xmlname) + '/'+ os.path.basename(xmlname) +'.data') as db:
-   #     db['data'] = obj
+    '''
+    Save the product to an XML file using Product Manager.
+    '''
+    import shelve
+    import os
+    with shelve.open(os.path.dirname(xmlname) + '/'+ os.path.basename(xmlname) +'.data') as db:
+        db['data'] = obj

-   from iscesys.Component.ProductManager import ProductManager as PM
+    from iscesys.Component.ProductManager import ProductManager as PM

-   pm = PM()
-   pm.configure()
+    pm = PM()
+    pm.configure()

-   pm.dumpProduct(obj, xmlname)
+    pm.dumpProduct(obj, xmlname)

-   return None
+
+    return None

 def getRelativeShifts(mFrame, sFrame, minBurst, maxBurst, secondaryBurstStart):
     '''
@@ -152,31 +153,32 @@ def adjustValidSampleLine_V2(reference, secondary, minAz=0, maxAz=0, minRng=0, m

     if (minAz > 0) and (maxAz > 0):

-            reference.firstValidLine = secondary.firstValidLine - int(np.floor(maxAz) - 4)
-            lastValidLine = reference.firstValidLine - 8 + secondary.numValidLines
-            if lastValidLine < reference.numberOfLines:
-                reference.numValidLines = secondary.numValidLines - 8
-            else:
-                reference.numValidLines = reference.numberOfLines - reference.firstValidLine
+        reference.firstValidLine = secondary.firstValidLine - int(np.floor(maxAz) - 4)
+        lastValidLine = reference.firstValidLine - 8 + secondary.numValidLines
+        if lastValidLine < reference.numberOfLines:
+            reference.numValidLines = secondary.numValidLines - 8
+        else:
+            reference.numValidLines = reference.numberOfLines - reference.firstValidLine

     elif (minAz < 0) and (maxAz < 0):
-            reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4)
-            lastValidLine = reference.firstValidLine + secondary.numValidLines - 8
-            if lastValidLine < reference.numberOfLines:
-                reference.numValidLines = secondary.numValidLines - 8
-            else:
-                reference.numValidLines = reference.numberOfLines - reference.firstValidLine
+        reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4)
+        lastValidLine = reference.firstValidLine + secondary.numValidLines - 8
+        if lastValidLine < reference.numberOfLines:
+            reference.numValidLines = secondary.numValidLines - 8
+        else:
+            reference.numValidLines = reference.numberOfLines - reference.firstValidLine

     elif (minAz < 0) and (maxAz > 0):
-            reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4)
-            lastValidLine = reference.firstValidLine + secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz))
-            if lastValidLine < reference.numberOfLines:
-                reference.numValidLines = secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz))
-            else:
-                reference.numValidLines = reference.numberOfLines - reference.firstValidLine
+        reference.firstValidLine = secondary.firstValidLine - int(np.floor(minAz) - 4)
+        lastValidLine = reference.firstValidLine + secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz))
+        if lastValidLine < reference.numberOfLines:
+            reference.numValidLines = secondary.numValidLines + int(np.floor(minAz) - 8) - int(np.ceil(maxAz))
+        else:
+            reference.numValidLines = reference.numberOfLines - reference.firstValidLine

     return reference

+
 def adjustCommonValidRegion(reference,secondary):
     # valid lines between reference and secondary

@@ -264,13 +266,14 @@ def asBaseClass(inobj):
     else:
         raise Exception('Cannot be converted to TOPSSwathSLCProduct')

+
 def getSwathList(indir):

     swathList = []
     for x in [1,2,3]:
-       SW = os.path.join(indir,'IW{0}'.format(x))
-       if os.path.exists(SW):
-          swathList.append(x)
+        SW = os.path.join(indir,'IW{0}'.format(x))
+        if os.path.exists(SW):
+            swathList.append(x)

     return swathList
