Merge commit '64768d4' into cmake

LT1AB
Ryan Burns 2020-04-08 11:05:14 -07:00
commit 4057a645a2
51 changed files with 516 additions and 571 deletions

View File

@ -72,7 +72,7 @@ jobs:
set -ex set -ex
pwd pwd
. /opt/conda/bin/activate root . /opt/conda/bin/activate root
export ISCE_HOME=/root/project/install/isce ISCE_HOME=/root/project/install/isce
export PATH="$ISCE_HOME/bin:$ISCE_HOME/applications:/opt/conda/bin:$PATH" export PATH="$ISCE_HOME/bin:$ISCE_HOME/applications:/opt/conda/bin:$PATH"
export PYTHONPATH="/root/project/install:$PYTHONPATH" export PYTHONPATH="/root/project/install:$PYTHONPATH"
export LD_LIBRARY_PATH="/opt/conda/lib:$LD_LIBRARY_PATH" export LD_LIBRARY_PATH="/opt/conda/lib:$LD_LIBRARY_PATH"

View File

@ -32,7 +32,12 @@ version = release_history # compatibility alias
__version__ = release_version __version__ = release_version
import sys, os import sys, os
isce_path = os.path.split(os.path.abspath(__file__))[0] isce_path = os.path.dirname(os.path.abspath(__file__))
import logging
from logging.config import fileConfig as _fc
_fc(os.path.join(isce_path, 'defaults', 'logging', 'logging.conf'))
sys.path.insert(1,isce_path) sys.path.insert(1,isce_path)
sys.path.insert(1,os.path.join(isce_path,'applications')) sys.path.insert(1,os.path.join(isce_path,'applications'))
sys.path.insert(1,os.path.join(isce_path,'components')) sys.path.insert(1,os.path.join(isce_path,'components'))
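
This hunk, and the long series of hunks that follow, centralize logging setup: isce/__init__.py now calls fileConfig() on defaults/logging/logging.conf once at import time, and the applications and components simply do "from isce import logging" instead of locating logging.conf through ISCE_HOME themselves. A minimal sketch of the same pattern for a generic package (file names and the config path are illustrative, not taken from this diff):

# mypkg/__init__.py -- configure logging once, when the package is first imported
import os
import logging
from logging.config import fileConfig as _fc
_pkg_dir = os.path.dirname(os.path.abspath(__file__))
_fc(os.path.join(_pkg_dir, 'defaults', 'logging', 'logging.conf'))

# mypkg/anywhere_else.py -- consumers reuse the module object exposed by the package
from mypkg import logging            # works because __init__.py imported logging
logger = logging.getLogger('mypkg.somecomponent')
logger.info('configuration happened at package import, not here')

Because "logging" becomes an attribute of the already-imported package, the existing getLogger() calls throughout the applications keep working unchanged.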

View File

@ -31,12 +31,8 @@
import os
import math import math
import logging from isce import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
from iscesys.Compatibility import Compatibility from iscesys.Compatibility import Compatibility
Compatibility.checkPythonVersion() Compatibility.checkPythonVersion()
from isceobj.Location.Peg import Peg from isceobj.Location.Peg import Peg

View File

@ -30,11 +30,7 @@
import os from isce import logging
import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
from iscesys.Compatibility import Compatibility from iscesys.Compatibility import Compatibility
Compatibility.checkPythonVersion() Compatibility.checkPythonVersion()
from iscesys.Component.FactoryInit import FactoryInit from iscesys.Component.FactoryInit import FactoryInit

View File

@ -30,11 +30,7 @@
import os from isce import logging
import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
import isceobj import isceobj
from iscesys.Component.FactoryInit import FactoryInit from iscesys.Component.FactoryInit import FactoryInit

View File

@ -34,6 +34,8 @@ import sys
import os import os
import argparse import argparse
from contrib.demUtils import createDemStitcher from contrib.demUtils import createDemStitcher
def main(): def main():
#if not argument provided force the --help flag #if not argument provided force the --help flag
if(len(sys.argv) == 1): if(len(sys.argv) == 1):
@ -132,8 +134,12 @@ def main():
print('Could not create a stitched DEM. Some tiles are missing') print('Could not create a stitched DEM. Some tiles are missing')
else: else:
if(args.correct): if(args.correct):
ds.correct()
#ds.correct(args.output,args.source,width,min(lat[0],lat[1]),min(lon[0],lon[1])) #ds.correct(args.output,args.source,width,min(lat[0],lat[1]),min(lon[0],lon[1]))
demImg = ds.correct()
# replace filename with full path including dir in which file is located
demImg.filename = os.path.abspath(os.path.join(args.dir, demImg.filename))
demImg.setAccessMode('READ')
demImg.renderHdr()
else: else:
print('Error. The --bbox (or -b) option must be specified when --action stitch is used') print('Error. The --bbox (or -b) option must be specified when --action stitch is used')
raise ValueError raise ValueError
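
The stitcher's correct() now returns the isceobj image, and dem.py immediately rewrites its metadata so the XML header records the absolute path of the stitched file. The three-call idiom (set filename, set access mode, renderHdr) is the same one fixImageXml.py uses below. A hedged standalone sketch of that idiom; 'dem.wgs84' is a placeholder name and the file plus its .xml header must already exist:

import os
import isce
from isceobj.Util.ImageUtil import ImageLib as IML

img = IML.loadImage('dem.wgs84')[0]            # same loader fixImageXml.py switches to below
img.filename = os.path.abspath('dem.wgs84')    # record the full path in the header
img.setAccessMode('READ')                      # the product is finished, read-only
img.renderHdr()                                # regenerate the image's .xml metadata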

View File

@ -30,12 +30,8 @@
import os
import datetime import datetime
import logging from isce import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
from iscesys.Compatibility import Compatibility from iscesys.Compatibility import Compatibility
Compatibility.checkPythonVersion() Compatibility.checkPythonVersion()
from iscesys.Component.FactoryInit import FactoryInit from iscesys.Component.FactoryInit import FactoryInit

View File

@ -1,9 +1,12 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
import os
import argparse
import isce import isce
import isceobj import isceobj
import argparse from isceobj.Util.ImageUtil import ImageLib as IML
import os
def cmdLineParse(): def cmdLineParse():
''' '''
@ -13,19 +16,14 @@ def cmdLineParse():
parser = argparse.ArgumentParser(description='Fixes pathnames in ISCE image XML files. Can be used to do more things in the future.') parser = argparse.ArgumentParser(description='Fixes pathnames in ISCE image XML files. Can be used to do more things in the future.')
parser.add_argument('-i', '--input', type=str, required=True, dest='infile', parser.add_argument('-i', '--input', type=str, required=True, dest='infile',
help = 'Input image for which the XML file needs to be fixed.') help = 'Input image for which the XML file needs to be fixed.')
parser.add_argument('-f', '--full', action='store_true', default=False, dest='full',
fname = parser.add_mutually_exclusive_group(required=True)
fname.add_argument('-f', '--full', action='store_false',
help = 'Replace filename with full path including dir in which file is located') help = 'Replace filename with full path including dir in which file is located')
parser.add_argument('-b', '--base', action='store_true', default=False, dest='base', fname.add_argument('-b', '--base', action='store_true',
help = 'Replace filename with basename to use in current directory') help = 'Replace filename with basename to use in current directory')
inps = parser.parse_args() inps = parser.parse_args()
if (inps.full and inps.base):
raise Exception('User requested to use both full path and basename')
if (not inps.full) and (not inps.base):
raise Exception('User did not request any change')
return inps return inps
@ -33,8 +31,6 @@ if __name__ == '__main__':
''' '''
Main driver. Main driver.
''' '''
from imageMath import IML
inps = cmdLineParse() inps = cmdLineParse()
if inps.infile.endswith('.xml'): if inps.infile.endswith('.xml'):
@ -42,15 +38,15 @@ if __name__ == '__main__':
dirname = os.path.dirname(inps.infile) dirname = os.path.dirname(inps.infile)
img, dataname, metaName = IML.loadImage(inps.infile) img = IML.loadImage(inps.infile)[0]
if inps.full: if inps.full:
fname = os.path.abspath( os.path.join(dirname, os.path.basename(inps.infile))) fname = os.path.abspath( os.path.join(dirname, os.path.basename(inps.infile)))
elif inps.base:
fname = os.path.basename( os.path.basename(inps.infile))
else: else:
raise Exception('Unknown state in {0}'.format(os.path.basename(__file__))) fname = os.path.basename( os.path.basename(inps.infile))
img.filename = fname img.filename = fname
img.setAccessMode('READ') img.setAccessMode('READ')
img.renderHdr() img.renderHdr()
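
The hand-written checks for "both flags" and "neither flag" are replaced by an argparse mutually exclusive group, which rejects conflicting flags and enforces that one of them is present at parse time. A self-contained sketch of that pattern (the flag names mirror the script; the store_true choices here are illustrative):

import argparse

parser = argparse.ArgumentParser(description='Pick exactly one filename style.')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('-f', '--full', action='store_true',
                   help='replace filename with full path')
group.add_argument('-b', '--base', action='store_true',
                   help='replace filename with basename')

args = parser.parse_args(['--full'])
print(args.full, args.base)     # True False; passing "-f -b" together exits with an error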

View File

@ -30,12 +30,8 @@
import os
import math import math
import logging from isce import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
import isceobj import isceobj
from iscesys.Component.FactoryInit import FactoryInit from iscesys.Component.FactoryInit import FactoryInit
from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU

View File

@ -34,8 +34,7 @@ from __future__ import print_function
import time import time
import os import os
import sys import sys
import logging from isce import logging
import logging.config
import isce import isce
import isceobj import isceobj
@ -46,11 +45,6 @@ from iscesys.Component.Configurable import SELF
import isceobj.InsarProc as InsarProc import isceobj.InsarProc as InsarProc
from isceobj.Scene.Frame import FrameMixin from isceobj.Scene.Frame import FrameMixin
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults', 'logging',
'logging.conf')
)
logger = logging.getLogger('isce.insar') logger = logging.getLogger('isce.insar')

View File

@ -41,8 +41,7 @@ import datetime
import os import os
import sys import sys
import math import math
import logging from isce import logging
import logging.config
import isce import isce
import isceobj import isceobj
@ -1438,11 +1437,6 @@ class IsceApp(Application, FrameMixin):
sys.exit("Could not find the output directory: %s" % self.outputDir) sys.exit("Could not find the output directory: %s" % self.outputDir)
os.chdir(self.outputDir) ##change working directory to given output directory os.chdir(self.outputDir) ##change working directory to given output directory
##read configfile only here so that log path is in output directory
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults', 'logging',
'logging.conf')
)
logger = logging.getLogger('isce.isceProc') logger = logging.getLogger('isce.isceProc')
logger.info(self.intromsg) logger.info(self.intromsg)
self._isce.dataDirectory = self.outputDir self._isce.dataDirectory = self.outputDir

View File

@ -27,16 +27,8 @@
# Author: Walter Szeliga # Author: Walter Szeliga
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import os
import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
import isce import isce
from isce import logging
from iscesys.Compatibility import Compatibility from iscesys.Compatibility import Compatibility
from iscesys.Component.Component import Component, Port from iscesys.Component.Component import Component, Port
from isceobj.Planet.Ellipsoid import Ellipsoid from isceobj.Planet.Ellipsoid import Ellipsoid

View File

@ -30,10 +30,8 @@
import time import time
import os
import sys import sys
import logging from isce import logging
import logging.config
import isce import isce
import isceobj import isceobj
@ -44,11 +42,6 @@ from iscesys.Component.Configurable import SELF
from isceobj import RtcProc from isceobj import RtcProc
from isceobj.Util.decorators import use_api from isceobj.Util.decorators import use_api
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults', 'logging',
'logging.conf')
)
logger = logging.getLogger('isce.grdsar') logger = logging.getLogger('isce.grdsar')

View File

@ -27,13 +27,9 @@
# Authors: Giangi Sacco, Eric Gurrola # Authors: Giangi Sacco, Eric Gurrola
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
import time import time
import os
import sys import sys
import logging from isce import logging
import logging.config
import isce import isce
import isceobj import isceobj
@ -43,11 +39,6 @@ from iscesys.Compatibility import Compatibility
from iscesys.Component.Configurable import SELF from iscesys.Component.Configurable import SELF
from isceobj import ScansarProc from isceobj import ScansarProc
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults', 'logging',
'logging.conf')
)
logger = logging.getLogger('isce.insar') logger = logging.getLogger('isce.insar')

View File

@ -35,10 +35,8 @@
from __future__ import print_function from __future__ import print_function
import time import time
import os
import sys import sys
import logging from isce import logging
import logging.config
import isce import isce
import isceobj import isceobj
@ -50,11 +48,6 @@ import isceobj.StripmapProc as StripmapProc
from isceobj.Scene.Frame import FrameMixin from isceobj.Scene.Frame import FrameMixin
from isceobj.Util.decorators import use_api from isceobj.Util.decorators import use_api
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults', 'logging',
'logging.conf')
)
logger = logging.getLogger('isce.insar') logger = logging.getLogger('isce.insar')
@ -391,6 +384,13 @@ RENDERER = Application.Parameter(
) )
) )
DISPERSIVE_FILTER_FILLING_METHOD = Application.Parameter('dispersive_filling_method',
public_name = 'dispersive filter filling method',
default='nearest_neighbour',
type=str,
mandatory=False,
doc='method to fill the holes left by masking the ionospheric phase estimate')
DISPERSIVE_FILTER_KERNEL_XSIZE = Application.Parameter('kernel_x_size', DISPERSIVE_FILTER_KERNEL_XSIZE = Application.Parameter('kernel_x_size',
public_name='dispersive filter kernel x-size', public_name='dispersive filter kernel x-size',
default=800, default=800,
@ -446,7 +446,6 @@ DISPERSIVE_FILTER_COHERENCE_THRESHOLD = Application.Parameter('dispersive_filter
type=float, type=float,
mandatory=False, mandatory=False,
doc='Coherence threshold to generate a mask file which gets used in the iterative filtering of the dispersive and non-disperive phase') doc='Coherence threshold to generate a mask file which gets used in the iterative filtering of the dispersive and non-disperive phase')
#Facility declarations #Facility declarations
MASTER = Application.Facility( MASTER = Application.Facility(
@ -555,6 +554,7 @@ class _RoiBase(Application, FrameMixin):
PICKLE_LOAD_DIR, PICKLE_LOAD_DIR,
RENDERER, RENDERER,
DO_DISPERSIVE, DO_DISPERSIVE,
DISPERSIVE_FILTER_FILLING_METHOD,
DISPERSIVE_FILTER_KERNEL_XSIZE, DISPERSIVE_FILTER_KERNEL_XSIZE,
DISPERSIVE_FILTER_KERNEL_YSIZE, DISPERSIVE_FILTER_KERNEL_YSIZE,
DISPERSIVE_FILTER_KERNEL_SIGMA_X, DISPERSIVE_FILTER_KERNEL_SIGMA_X,
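
The new filling-method option follows the usual stripmapApp parameter pattern: the first argument is the attribute name the processing code reads later (self.dispersive_filling_method in runDispersive.py below), public_name is the key exposed through the input XML, and adding the constant to the class parameter list is what actually registers it. A condensed, annotated sketch of that wiring (class context assumed from the surrounding file):

DISPERSIVE_FILTER_FILLING_METHOD = Application.Parameter(
    'dispersive_filling_method',                      # attribute: self.dispersive_filling_method
    public_name='dispersive filter filling method',   # key users set in the input file
    default='nearest_neighbour',                      # the other value handled below is 'smoothed'
    type=str,
    mandatory=False,
    doc='method to fill the holes left by masking the ionospheric phase estimate')
# ...it only becomes visible to users once it is added to the class's parameter list,
# which is what the second half of this hunk does.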

View File

@ -34,10 +34,8 @@
import time import time
import os
import sys import sys
import logging from isce import logging
import logging.config
import isce import isce
import isceobj import isceobj
@ -47,11 +45,6 @@ from iscesys.Compatibility import Compatibility
from iscesys.Component.Configurable import SELF from iscesys.Component.Configurable import SELF
from isceobj import TopsProc from isceobj import TopsProc
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults', 'logging',
'logging.conf')
)
logger = logging.getLogger('isce.insar') logger = logging.getLogger('isce.insar')

View File

@ -30,10 +30,8 @@
import time import time
import os
import sys import sys
import logging from isce import logging
import logging.config
import isce import isce
import isceobj import isceobj
@ -42,11 +40,6 @@ from isce.applications.topsApp import TopsInSAR
from iscesys.Component.Application import Application from iscesys.Component.Application import Application
from isceobj.Util.decorators import use_api from isceobj.Util.decorators import use_api
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults', 'logging',
'logging.conf')
)
logger = logging.getLogger('isce.insar') logger = logging.getLogger('isce.insar')
WINDOW_SIZE_WIDTH = Application.Parameter( WINDOW_SIZE_WIDTH = Application.Parameter(

View File

@ -27,14 +27,7 @@
# Author: Walter Szeliga # Author: Walter Szeliga
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from isce import logging
import os
import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
from iscesys.Compatibility import Compatibility from iscesys.Compatibility import Compatibility
Compatibility.checkPythonVersion() Compatibility.checkPythonVersion()
from iscesys.Component.FactoryInit import FactoryInit from iscesys.Component.FactoryInit import FactoryInit

View File

@ -29,11 +29,7 @@
import math import math
import os from isce import logging
import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
from isceobj.Util.decorators import type_check, force, pickled, logged from isceobj.Util.decorators import type_check, force, pickled, logged
import numpy as np import numpy as np

View File

@ -27,14 +27,7 @@
# Author: Walter Szeliga # Author: Walter Szeliga
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
from isce import logging
import os
import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
from isceobj.Sensor.ERS import ERS from isceobj.Sensor.ERS import ERS
from isceobj.Scene.Track import Track from isceobj.Scene.Track import Track
logger = logging.getLogger("testTrack") logger = logging.getLogger("testTrack")

View File

@ -493,15 +493,9 @@ class Sentinel1(Component):
offset = np.int(np.rint((aslice.product.bursts[0].burstStartUTC - t0).total_seconds() / burstStartInterval.total_seconds())) offset = np.int(np.rint((aslice.product.bursts[0].burstStartUTC - t0).total_seconds() / burstStartInterval.total_seconds()))
for kk in range(aslice.product.numberOfBursts): for kk in range(aslice.product.numberOfBursts):
#####Overwrite previous copy if one exists #####Skip appending if burst also exists from previous scene
if (offset+kk) < len(self.product.bursts): if (offset+kk) < len(self.product.bursts):
self.product.bursts[offset+kk] = aslice.product.bursts[kk] continue
####Keep track of tiff src files
if len(self.tiff):
self._tiffSrc[offset+kk] = aslice.tiff[0]
self._elevationAngleVsTau[offset+kk] = aslice._elevationAngleVsTau[kk]
elif (offset+kk) == len(self.product.bursts): elif (offset+kk) == len(self.product.bursts):
self.product.bursts.append(aslice.product.bursts[kk]) self.product.bursts.append(aslice.product.bursts[kk])
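
The slice-stitching logic changes from "overwrite the burst we already have" to "keep the burst from the earlier scene and only append genuinely new ones". A pure-Python sketch of the offset-based merge, with burst objects reduced to strings for illustration:

# offset is the index of the slice's first burst within the merged timeline
merged = ['b0', 'b1', 'b2']           # bursts already collected from earlier slices
slice_bursts = ['b2*', 'b3*', 'b4*']  # bursts of the next slice; 'b2*' overlaps 'b2'
offset = 2                            # slice starts two burst intervals after the first scene

for kk, burst in enumerate(slice_bursts):
    if offset + kk < len(merged):
        continue                      # burst already present from a previous scene: keep it
    elif offset + kk == len(merged):
        merged.append(burst)          # contiguous extension of the timeline

print(merged)                         # ['b0', 'b1', 'b2', 'b3*', 'b4*']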

View File

@ -3,17 +3,12 @@
# #
# #
import logging import logging
import os import os,gdal
import isceobj import isceobj
from isceobj.Constants import SPEED_OF_LIGHT from isceobj.Constants import SPEED_OF_LIGHT
import numpy as np import numpy as np
import gdal
try:
import cv2
except ImportError:
print('OpenCV2 does not appear to be installed / is not importable.')
print('OpenCV2 is needed for this step. You may experience failures ...')
logger = logging.getLogger('isce.insar.runDispersive') logger = logging.getLogger('isce.insar.runDispersive')
@ -29,19 +24,8 @@ def getValue(dataFile, band, y_ref, x_ref):
ds = None ds = None
return ref[0][0] return ref[0][0]
def check_consistency(lowBandIgram, highBandIgram, outputDir):
def dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, y_ref=None, x_ref=None, m=None , d=None):
jumpFile = os.path.join(outputDir , "jumps.bil")
cmd = 'imageMath.py -e="round((a_1-b_1)/(2.0*PI))" --a={0} --b={1} -o {2} -t float -s BIL'.format(lowBandIgram, highBandIgram, jumpFile)
print(cmd)
os.system(cmd)
return jumpFile
def dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, jumpFile, y_ref=None, x_ref=None, m=None , d=None):
if y_ref and x_ref: if y_ref and x_ref:
refL = getValue(lowBandIgram, 2, y_ref, x_ref) refL = getValue(lowBandIgram, 2, y_ref, x_ref)
@ -57,100 +41,80 @@ def dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispers
if m and d: if m and d:
coef = (fL*fH)/(f0*(fH**2 - fL**2)) coef = (fL*fH)/(f0*(fH**2 - fL**2))
#cmd = 'imageMath.py -e="{0}*((a_1-{8}-2*PI*c)*{1}-(b_1-{9}-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} -o {7} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, m , d, outDispersive, refL, refH) cmd = 'imageMath.py -e="{0}*((a_1-2*PI*c)*{1}-(b_1+(2.0*PI)-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} -o {7} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, m , d, outDispersive)
cmd = 'imageMath.py -e="{0}*((a_1-2*PI*c)*{1}-(b_1+(2.0*PI*g)-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} --g={7} -o {8} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, m , d, jumpFile, outDispersive)
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
coefn = f0/(fH**2-fL**2) coefn = f0/(fH**2-fL**2)
#cmd = 'imageMath.py -e="{0}*((a_1-{8}-2*PI*c)*{1}-(b_1-{9}-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} -o {7} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, m , d, outNonDispersive, refH, refL) cmd = 'imageMath.py -e="{0}*((a_1+(2.0*PI)-2*PI*c)*{1}-(b_1-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} -o {7} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, m , d, outNonDispersive)
cmd = 'imageMath.py -e="{0}*((a_1+(2.0*PI*g)-2*PI*c)*{1}-(b_1-2*PI*(c+f))*{2})" --a={3} --b={4} --c={5} --f={6} --g={7} -o {8} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, m , d, jumpFile, outNonDispersive)
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
else: else:
coef = (fL*fH)/(f0*(fH**2 - fL**2)) coef = (fL*fH)/(f0*(fH**2 - fL**2))
#cmd = 'imageMath.py -e="{0}*((a_1-{6})*{1}-(b_1-{7})*{2})" --a={3} --b={4} -o {5} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, outDispersive, refL, refH) cmd = 'imageMath.py -e="{0}*(a_1*{1}-(b_1+2.0*PI)*{2})" --a={3} --b={4} -o {5} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, outDispersive)
cmd = 'imageMath.py -e="{0}*(a_1*{1}-(b_1+2.0*PI*c)*{2})" --a={3} --b={4} --c={5} -o {6} -t float32 -s BIL'.format(coef,fH, fL, lowBandIgram, highBandIgram, jumpFile, outDispersive)
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
coefn = f0/(fH**2-fL**2) coefn = f0/(fH**2-fL**2)
#cmd = 'imageMath.py -e="{0}*((a_1-{6})*{1}-(b_1-{7})*{2})" --a={3} --b={4} -o {5} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, outNonDispersive, refH, refL) cmd = 'imageMath.py -e="{0}*((a_1+2.0*PI)*{1}-(b_1)*{2})" --a={3} --b={4} -o {5} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, outNonDispersive)
cmd = 'imageMath.py -e="{0}*((a_1+2.0*PI*c)*{1}-(b_1)*{2})" --a={3} --b={4} --c={5} -o {6} -t float32 -s BIL'.format(coefn,fH, fL, highBandIgram, lowBandIgram, jumpFile, outNonDispersive)
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
return None return None
def std_iono_mean_coh(f0,fL,fH,coh_mean,rgLooks,azLooks):
# From Liao et al., Remote Sensing of Environment 2018
# STD sub-band at average coherence value (Eq. 8)
Nb = (rgLooks*azLooks)/3.0
coeffA = (np.sqrt(2.0*Nb))**(-1)
coeffB = np.sqrt(1-coh_mean**2)/coh_mean
std_subbands = coeffA * coeffB
# STD Ionosphere (Eq. 7)
coeffC = np.sqrt(1+(fL/fH)**2)
coeffD = (fH*fL*fH)/(f0*(fH**2-fL**2))
std_iono = coeffC*coeffD*std_subbands
return std_iono
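
std_iono_mean_coh() implements the sub-band/ionospheric standard-deviation relation cited from Liao et al. (2018). A quick numeric check of the same expressions with made-up but plausible inputs (the frequencies, looks and coherence below are illustrative, not values used by the processor):

import math

f0, fL, fH = 1.2575e9, 1.2436e9, 1.2714e9   # center, low-band, high-band frequencies [Hz]
coh_mean = 0.6                              # mean coherence at which to evaluate the STD
rgLooks, azLooks = 9, 3                     # range / azimuth looks

Nb = (rgLooks * azLooks) / 3.0                                     # effective looks (Eq. 8)
std_subbands = math.sqrt(1 - coh_mean**2) / coh_mean / math.sqrt(2.0 * Nb)
coeffC = math.sqrt(1 + (fL / fH)**2)                               # Eq. 7
coeffD = (fH * fL * fH) / (f0 * (fH**2 - fL**2))
std_iono = coeffC * coeffD * std_subbands

print(round(std_subbands, 3), round(std_iono, 3))   # sub-band and ionospheric phase STD [rad]

The ratio coeffD is large because the sub-band separation is small relative to the carrier, which is why the estimated dispersive phase needs the aggressive filtering performed below.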
def theoretical_variance_fromSubBands(self, f0, fL, fH, B, Sig_phi_iono, Sig_phi_nonDisp,N): def theoretical_variance_fromSubBands(self, f0, fL, fH, B, Sig_phi_iono, Sig_phi_nonDisp,N):
# Calculating the theoretical variance of the
# ionospheric phase based on the coherence of # Calculating the theoretical variance of the ionospheric phase based on the coherence of the sub-band interferograns
# the sub-band interferograns
ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname)
lowBandCoherence = os.path.join(ifgDirname , self.insar.coherenceFilename) lowBandCoherence = os.path.join(ifgDirname , self.insar.coherenceFilename)
Sig_phi_L = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".sig") Sig_phi_L = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".sig")
ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname) ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.highBandSlcDirname)
#highBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".unw")
#ifgDirname = os.path.dirname(self.insar.lowBandIgram)
#lowBandCoherence = os.path.join(ifgDirname , self.insar.coherenceFilename)
#Sig_phi_L = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".sig")
#ifgDirname = os.path.dirname(self.insar.highBandIgram)
highBandCoherence = os.path.join(ifgDirname , self.insar.coherenceFilename) highBandCoherence = os.path.join(ifgDirname , self.insar.coherenceFilename)
Sig_phi_H = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".sig") Sig_phi_H = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename + ".sig")
#N = self.numberAzimuthLooks*self.numberRangeLooks
#PI = np.pi
#fL,f0,fH,B = getBandFrequencies(inps)
#cL = read(inps.lowBandCoherence,bands=[1])
#cL = cL[0,:,:]
#cL[cL==0.0]=0.001
cmd = 'imageMath.py -e="sqrt(1-a**2)/a/sqrt(2.0*{0})" --a={1} -o {2} -t float -s BIL'.format(N, lowBandCoherence, Sig_phi_L) cmd = 'imageMath.py -e="sqrt(1-a**2)/a/sqrt(2.0*{0})" --a={1} -o {2} -t float -s BIL'.format(N, lowBandCoherence, Sig_phi_L)
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
#Sig_phi_L = np.sqrt(1-cL**2)/cL/np.sqrt(2.*N)
#cH = read(inps.highBandCoherence,bands=[1])
#cH = cH[0,:,:]
#cH[cH==0.0]=0.001
cmd = 'imageMath.py -e="sqrt(1-a**2)/a/sqrt(2.0*{0})" --a={1} -o {2} -t float -s BIL'.format(N, highBandCoherence, Sig_phi_H) cmd = 'imageMath.py -e="sqrt(1-a**2)/a/sqrt(2.0*{0})" --a={1} -o {2} -t float -s BIL'.format(N, highBandCoherence, Sig_phi_H)
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
#Sig_phi_H = np.sqrt(1-cH**2)/cH/np.sqrt(2.0*N)
coef = (fL*fH)/(f0*(fH**2 - fL**2)) coef = (fL*fH)/(f0*(fH**2 - fL**2))
cmd = 'imageMath.py -e="sqrt(({0}**2)*({1}**2)*(a**2) + ({0}**2)*({2}**2)*(b**2))" --a={3} --b={4} -o {5} -t float -s BIL'.format(coef, fL, fH, Sig_phi_L, Sig_phi_H, Sig_phi_iono) cmd = 'imageMath.py -e="sqrt(({0}**2)*({1}**2)*(a**2) + ({0}**2)*({2}**2)*(b**2))" --a={3} --b={4} -o {5} -t float -s BIL'.format(coef, fL, fH, Sig_phi_L, Sig_phi_H, Sig_phi_iono)
os.system(cmd) os.system(cmd)
#Sig_phi_iono = np.sqrt((coef**2)*(fH**2)*Sig_phi_H**2 + (coef**2)*(fL**2)*Sig_phi_L**2)
#length, width = Sig_phi_iono.shape
#outFileIono = os.path.join(inps.outDir, 'Sig_iono.bil')
#write(Sig_phi_iono, outFileIono, 1, 6)
#write_xml(outFileIono, length, width)
coef_non = f0/(fH**2 - fL**2) coef_non = f0/(fH**2 - fL**2)
cmd = 'imageMath.py -e="sqrt(({0}**2)*({1}**2)*(a**2) + ({0}**2)*({2}**2)*(b**2))" --a={3} --b={4} -o {5} -t float -s BIL'.format(coef_non, fL, fH, Sig_phi_L, Sig_phi_H, Sig_phi_nonDisp) cmd = 'imageMath.py -e="sqrt(({0}**2)*({1}**2)*(a**2) + ({0}**2)*({2}**2)*(b**2))" --a={3} --b={4} -o {5} -t float -s BIL'.format(coef_non, fL, fH, Sig_phi_L, Sig_phi_H, Sig_phi_nonDisp)
os.system(cmd) os.system(cmd)
#Sig_phi_non_dis = np.sqrt((coef_non**2) * (fH**2) * Sig_phi_H**2 + (coef_non**2) * (fL**2) * Sig_phi_L**2)
#outFileNonDis = os.path.join(inps.outDir, 'Sig_nonDis.bil')
#write(Sig_phi_non_dis, outFileNonDis, 1, 6)
#write_xml(outFileNonDis, length, width)
return None #Sig_phi_iono, Sig_phi_nonDisp return None #Sig_phi_iono, Sig_phi_nonDisp
def lowPassFilter(dataFile, sigDataFile, maskFile, Sx, Sy, sig_x, sig_y, iteration=5, theta=0.0): def lowPassFilter(self,dataFile, sigDataFile, maskFile, Sx, Sy, sig_x, sig_y, iteration=5, theta=0.0):
ds = gdal.Open(dataFile + '.vrt', gdal.GA_ReadOnly) ds = gdal.Open(dataFile + '.vrt', gdal.GA_ReadOnly)
length = ds.RasterYSize length = ds.RasterYSize
width = ds.RasterXSize width = ds.RasterXSize
@ -159,7 +123,7 @@ def lowPassFilter(dataFile, sigDataFile, maskFile, Sx, Sy, sig_x, sig_y, iterati
sigData = np.memmap(sigDataFile, dtype=np.float32, mode='r', shape=(length,width)) sigData = np.memmap(sigDataFile, dtype=np.float32, mode='r', shape=(length,width))
mask = np.memmap(maskFile, dtype=np.byte, mode='r', shape=(length,width)) mask = np.memmap(maskFile, dtype=np.byte, mode='r', shape=(length,width))
dataF, sig_dataF = iterativeFilter(dataIn[:,:], mask[:,:], sigData[:,:], iteration, Sx, Sy, sig_x, sig_y, theta) dataF, sig_dataF = iterativeFilter(self,dataIn[:,:], mask[:,:], sigData[:,:], iteration, Sx, Sy, sig_x, sig_y, theta)
filtDataFile = dataFile + ".filt" filtDataFile = dataFile + ".filt"
sigFiltDataFile = sigDataFile + ".filt" sigFiltDataFile = sigDataFile + ".filt"
@ -192,7 +156,7 @@ def write_xml(fileName,width,length,bands,dataType,scheme):
return None return None
def iterativeFilter(dataIn, mask, Sig_dataIn, iteration, Sx, Sy, sig_x, sig_y, theta=0.0): def iterativeFilter(self,dataIn, mask, Sig_dataIn, iteration, Sx, Sy, sig_x, sig_y, theta=0.0):
data = np.zeros(dataIn.shape) data = np.zeros(dataIn.shape)
data[:,:] = dataIn[:,:] data[:,:] = dataIn[:,:]
Sig_data = np.zeros(dataIn.shape) Sig_data = np.zeros(dataIn.shape)
@ -201,17 +165,30 @@ def iterativeFilter(dataIn, mask, Sig_dataIn, iteration, Sx, Sy, sig_x, sig_y, t
print ('masking the data') print ('masking the data')
data[mask==0]=np.nan data[mask==0]=np.nan
Sig_data[mask==0]=np.nan Sig_data[mask==0]=np.nan
print ('Filling the holes with nearest neighbor interpolation')
dataF = fill(data) if self.dispersive_filling_method == "smoothed":
Sig_data = fill(Sig_data) print('Filling the holes with smoothed values')
dataF = fill_with_smoothed(data,3)
Sig_data = fill_with_smoothed(Sig_data,3)
else:
print ('Filling the holes with nearest neighbor interpolation')
dataF = fill(data)
Sig_data = fill(Sig_data)
print ('Low pass Gaussian filtering the interpolated data') print ('Low pass Gaussian filtering the interpolated data')
dataF, Sig_dataF = Filter(dataF, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0) dataF, Sig_dataF = Filter(dataF, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0)
for i in range(iteration): for i in range(iteration):
print ('iteration: ', i , ' of ',iteration) print ('iteration: ', i , ' of ',iteration)
print ('masking the interpolated and filtered data') print ('masking the interpolated and filtered data')
dataF[mask==0]=np.nan dataF[mask==0]=np.nan
print('Filling the holes with nearest neighbor interpolation of the filtered data from previous step')
dataF = fill(dataF) if self.dispersive_filling_method == "smoothed":
print("Fill the holes with smoothed values")
dataF = fill_with_smoothed(dataF,3)
else:
print('Filling the holes with nearest neighbor interpolation of the filtered data from previous step')
dataF = fill(dataF)
print('Replace the valid pixels with original unfiltered data') print('Replace the valid pixels with original unfiltered data')
dataF[mask==1]=data[mask==1] dataF[mask==1]=data[mask==1]
dataF, Sig_dataF = Filter(dataF, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0) dataF, Sig_dataF = Filter(dataF, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0)
@ -219,6 +196,9 @@ def iterativeFilter(dataIn, mask, Sig_dataIn, iteration, Sx, Sy, sig_x, sig_y, t
return dataF, Sig_dataF return dataF, Sig_dataF
def Filter(data, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0): def Filter(data, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0):
import cv2
kernel = Gaussian_kernel(Sx, Sy, sig_x, sig_y) #(800, 800, 15.0, 100.0) kernel = Gaussian_kernel(Sx, Sy, sig_x, sig_y) #(800, 800, 15.0, 100.0)
kernel = rotate(kernel , theta) kernel = rotate(kernel , theta)
@ -227,11 +207,6 @@ def Filter(data, Sig_data, Sx, Sy, sig_x, sig_y, theta=0.0):
W1 = cv2.filter2D(1.0/Sig_data**2,-1,kernel) W1 = cv2.filter2D(1.0/Sig_data**2,-1,kernel)
W2 = cv2.filter2D(1.0/Sig_data**2,-1,kernel**2) W2 = cv2.filter2D(1.0/Sig_data**2,-1,kernel**2)
#data = ndimage.convolve(data,kernel, mode='nearest')
#W1 = ndimage.convolve(1.0/Sig_data**2,kernel, mode='nearest')
#W2 = ndimage.convolve(1.0/Sig_data**2,kernel**2, mode='nearest')
return data/W1, np.sqrt(W2/(W1**2)) return data/W1, np.sqrt(W2/(W1**2))
def Gaussian_kernel(Sx, Sy, sig_x,sig_y): def Gaussian_kernel(Sx, Sy, sig_x,sig_y):
@ -281,7 +256,34 @@ def rotate(k , theta):
k = a*k k = a*k
return k return k
def fill_with_smoothed(off,filterSize):
from astropy.convolution import convolve
off_2filt=np.copy(off)
kernel = np.ones((filterSize,filterSize),np.float32)/(filterSize*filterSize)
loop = 0
cnt2=1
while (cnt2!=0 & loop<100):
loop += 1
idx2= np.isnan(off_2filt)
cnt2 = np.sum(np.count_nonzero(np.isnan(off_2filt)))
print(cnt2)
if cnt2 != 0:
off_filt= convolve(off_2filt,kernel,boundary='extend',nan_treatment='interpolate')
off_2filt[idx2]=off_filt[idx2]
idx3 = np.where(off_filt == 0)
off_2filt[idx3]=np.nan
off_filt=None
return off_2filt
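
fill_with_smoothed() repeatedly convolves the masked array with a small box kernel (astropy's NaN-aware convolve) and copies the smoothed values into the holes until no NaNs remain; larger holes need several passes of the loop above. A tiny standalone demo of one such pass on a made-up array (requires numpy and astropy):

import numpy as np
from astropy.convolution import convolve

data = np.arange(25, dtype=np.float32).reshape(5, 5)
data[1:3, 1:3] = np.nan                                  # punch a hole to be filled

kernel = np.ones((3, 3), np.float32) / 9.0               # same box kernel as fill_with_smoothed
holes = np.isnan(data)
smoothed = convolve(data, kernel, boundary='extend', nan_treatment='interpolate')

filled = data.copy()
filled[holes] = smoothed[holes]                          # only the holes take smoothed values
print(np.isnan(filled).sum())                            # 0 -- one pass closes this small hole
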
def fill(data, invalid=None): def fill(data, invalid=None):
from scipy import ndimage
""" """
Replace the value of invalid 'data' cells (indicated by 'invalid') Replace the value of invalid 'data' cells (indicated by 'invalid')
by the value of the nearest valid data cell by the value of the nearest valid data cell
@ -295,8 +297,6 @@ def fill(data, invalid=None):
Output: Output:
Return a filled array. Return a filled array.
""" """
from scipy import ndimage
if invalid is None: invalid = np.isnan(data) if invalid is None: invalid = np.isnan(data)
ind = ndimage.distance_transform_edt(invalid, ind = ndimage.distance_transform_edt(invalid,
@ -305,8 +305,10 @@ def fill(data, invalid=None):
return data[tuple(ind)] return data[tuple(ind)]
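
fill() is the nearest-neighbour counterpart: distance_transform_edt with return_indices=True gives, for every invalid pixel, the indices of its closest valid pixel, and data[tuple(ind)] performs the fill in one shot. A standalone demo (numpy and scipy only):

import numpy as np
from scipy import ndimage

data = np.array([[1.0, np.nan, 3.0],
                 [4.0, np.nan, 6.0],
                 [7.0, 8.0,    9.0]])

invalid = np.isnan(data)
ind = ndimage.distance_transform_edt(invalid,
                                     return_distances=False,
                                     return_indices=True)   # per-pixel index of nearest valid cell
filled = data[tuple(ind)]
print(filled)      # the NaNs take the value of their nearest non-NaN neighbour
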
def getMask(self, maskFile): def getMask(self, maskFile,std_iono):
from scipy.ndimage import median_filter
ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname) ifgDirname = os.path.join(self.insar.ifgDirname, self.insar.lowBandSlcDirname)
lowBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename ) lowBandIgram = os.path.join(ifgDirname , 'filt_' + self.insar.ifgFilename )
lowBandCor = os.path.join(ifgDirname ,self.insar.coherenceFilename) lowBandCor = os.path.join(ifgDirname ,self.insar.coherenceFilename)
@ -329,7 +331,7 @@ def getMask(self, maskFile):
else: else:
highBandIgram += '.unw' highBandIgram += '.unw'
if self.dispersive_filter_mask_type == "coherence": if (self.dispersive_filter_mask_type == "coherence") and (not self.dispersive_filter_mask_type == "median_filter"):
print ('generating a mask based on coherence files of sub-band interferograms with a threshold of {0}'.format(self.dispersive_filter_coherence_threshold)) print ('generating a mask based on coherence files of sub-band interferograms with a threshold of {0}'.format(self.dispersive_filter_coherence_threshold))
cmd = 'imageMath.py -e="(a>{0})*(b>{0})" --a={1} --b={2} -t byte -s BIL -o {3}'.format(self.dispersive_filter_coherence_threshold, lowBandCor, highBandCor, maskFile) cmd = 'imageMath.py -e="(a>{0})*(b>{0})" --a={1} --b={2} -t byte -s BIL -o {3}'.format(self.dispersive_filter_coherence_threshold, lowBandCor, highBandCor, maskFile)
os.system(cmd) os.system(cmd)
@ -339,28 +341,31 @@ def getMask(self, maskFile):
print ('generating a mask based on .conncomp files') print ('generating a mask based on .conncomp files')
cmd = 'imageMath.py -e="(a>0)*(b>0)" --a={0} --b={1} -t byte -s BIL -o {2}'.format(lowBandIgram + '.conncomp', highBandIgram + '.conncomp', maskFile) cmd = 'imageMath.py -e="(a>0)*(b>0)" --a={0} --b={1} -t byte -s BIL -o {2}'.format(lowBandIgram + '.conncomp', highBandIgram + '.conncomp', maskFile)
os.system(cmd) os.system(cmd)
#m = read(lowBandIgram + '.conncomp')
#m = m[0,:,:]
#m = thresholdConnectedComponents(m,minPixelConnComp)
#mask = np.ones_like((m))
#mask[m==0] = 0.0
#m = read(highBandIgram + '.conncomp') elif self.dispersive_filter_mask_type == "median_filter":
#m = m[0,:,:] print('Generating mask based on median filtering of the raw dispersive component')
#m = thresholdConnectedComponents(m,minPixelConnComp)
#mask[m==0] = 0.0 # Open raw dispersive component (non-filtered, no unwrapping-error corrected)
dispFilename = os.path.join(self.insar.ionosphereDirname,self.insar.dispersiveFilename)
sigFilename = os.path.join(self.insar.ionosphereDirname,self.insar.dispersiveFilename+'.sig')
ds = gdal.Open(dispFilename+'.vrt',gdal.GA_ReadOnly)
disp = ds.GetRasterBand(1).ReadAsArray()
ds=None
#outName = os.path.join(inps.outDir, 'mask0.bil') mask = (np.abs(disp-median_filter(disp,15))<3*std_iono)
#length, width = mask.shape
#write(mask, outName, 1, 6) mask = mask.astype(np.float32)
#write_xml(outName, length, width) mask.tofile(maskFile)
dims=np.shape(mask)
write_xml(maskFile,dims[1],dims[0],1,"FLOAT","BIL")
else: else:
print ('generating a mask based on unwrapped files. Pixels with phase = 0 are masked out.') print ('generating a mask based on unwrapped files. Pixels with phase = 0 are masked out.')
cmd = 'imageMath.py -e="(a_1!=0)*(b_1!=0)" --a={0} --b={1} -t byte -s BIL -o {2}'.format(lowBandIgram , highBandIgram , maskFile) cmd = 'imageMath.py -e="(a_1!=0)*(b_1!=0)" --a={0} --b={1} -t byte -s BIL -o {2}'.format(lowBandIgram , highBandIgram , maskFile)
os.system(cmd) os.system(cmd)
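
The new "median_filter" mask option flags a pixel as reliable when the raw dispersive estimate deviates from a 15x15 median-filtered version of itself by less than 3*std_iono (the value computed by std_iono_mean_coh above). A standalone sketch of that test on synthetic data (array size, noise level and std_iono are illustrative):

import numpy as np
from scipy.ndimage import median_filter

rng = np.random.default_rng(0)
disp = rng.normal(0.0, 1.0, (100, 100)).astype(np.float32)  # stand-in for the raw dispersive phase
disp[50, 50] += 40.0                                        # one strong outlier / unwrapping jump

std_iono = 1.0                                              # would come from std_iono_mean_coh()
mask = np.abs(disp - median_filter(disp, 15)) < 3.0 * std_iono
mask = mask.astype(np.float32)                              # written out as a float BIL + .xml above

print(mask[50, 50], mask.mean())                            # the outlier pixel is masked out (0.0)
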
def unwrapp_error_correction(f0, B, dispFile, nonDispFile,lowBandIgram, highBandIgram, jumpsFile, y_ref=None, x_ref=None): def unwrapp_error_correction(f0, B, dispFile, nonDispFile,lowBandIgram, highBandIgram, y_ref=None, x_ref=None):
dFile = os.path.join(os.path.dirname(dispFile) , "dJumps.bil") dFile = os.path.join(os.path.dirname(dispFile) , "dJumps.bil")
mFile = os.path.join(os.path.dirname(dispFile) , "mJumps.bil") mFile = os.path.join(os.path.dirname(dispFile) , "mJumps.bil")
@ -373,28 +378,14 @@ def unwrapp_error_correction(f0, B, dispFile, nonDispFile,lowBandIgram, highBand
refL = 0.0 refL = 0.0
refH = 0.0 refH = 0.0
#cmd = 'imageMath.py -e="round(((a_1-{7}) - (b_1-{8}) - (2.0*{0}/3.0/{1})*c + (2.0*{0}/3.0/{1})*f )/2.0/PI)" --a={2} --b={3} --c={4} --f={5} -o {6} -t float32 -s BIL'.format(B, f0, highBandIgram, lowBandIgram, nonDispFile, dispFile, dFile, refH, refL) cmd = 'imageMath.py -e="round(((a_1+(2.0*PI)) - (b_1) - (2.0*{0}/3.0/{1})*c + (2.0*{0}/3.0/{1})*f )/2.0/PI)" --a={2} --b={3} --c={4} --f={5} -o {6} -t float32 -s BIL'.format(B, f0, highBandIgram, lowBandIgram, nonDispFile, dispFile, dFile)
cmd = 'imageMath.py -e="round(((a_1+(2.0*PI*g)) - (b_1) - (2.0*{0}/3.0/{1})*c + (2.0*{0}/3.0/{1})*f )/2.0/PI)" --a={2} --b={3} --c={4} --f={5} --g={6} -o {7} -t float32 -s BIL'.format(B, f0, highBandIgram, lowBandIgram, nonDispFile, dispFile, jumpsFile, dFile)
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
#d = (phH - phL - (2.*B/3./f0)*ph_nondis + (2.*B/3./f0)*ph_iono )/2./PI
#d = np.round(d) cmd = 'imageMath.py -e="round(((a_1 ) + (b_1+(2.0*PI)) - 2.0*c - 2.0*f )/4.0/PI - g/2)" --a={0} --b={1} --c={2} --f={3} --g={4} -o {5} -t float32 -s BIL'.format(lowBandIgram, highBandIgram, nonDispFile, dispFile, dFile, mFile)
#cmd = 'imageMath.py -e="round(((a_1 - {6}) + (b_1-{7}) - 2.0*c - 2.0*f )/4.0/PI - g/2)" --a={0} --b={1} --c={2} --f={3} --g={4} -o {5} -t float32 -s BIL'.format(lowBandIgram, highBandIgram, nonDispFile, dispFile, dFile, mFile, refL, refH)
cmd = 'imageMath.py -e="round(((a_1 ) + (b_1+(2.0*PI*k)) - 2.0*c - 2.0*f )/4.0/PI - g/2)" --a={0} --b={1} --c={2} --f={3} --g={4} --k={5} -o {6} -t float32 -s BIL'.format(lowBandIgram, highBandIgram, nonDispFile, dispFile, dFile, jumpsFile, mFile)
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
#m = (phL + phH - 2*ph_nondis - 2*ph_iono)/4./PI - d/2.
#m = np.round(m)
return mFile , dFile return mFile , dFile
@ -451,33 +442,33 @@ def runDispersive(self):
pulseLength = masterFrame.instrument.pulseLength pulseLength = masterFrame.instrument.pulseLength
chirpSlope = masterFrame.instrument.chirpSlope chirpSlope = masterFrame.instrument.chirpSlope
# Total Bandwidth # Total Bandwidth
B = np.abs(chirpSlope)*pulseLength B = np.abs(chirpSlope)*pulseLength
###Determine looks ###Determine looks
azLooks, rgLooks = self.insar.numberOfLooks( masterFrame, self.posting, azLooks, rgLooks = self.insar.numberOfLooks( masterFrame, self.posting,
self.numberAzimuthLooks, self.numberRangeLooks) self.numberAzimuthLooks, self.numberRangeLooks)
#########################################################
# make sure the low-band and high-band interferograms have consistent unwrapping errors.
# For this we estimate jumps as the difference of lowBand and highBand phases divided by 2PI
# The assumprion is that bothe interferograms are flattened and the phase difference between them
# is less than 2PI. This assumprion is valid for current sensors. It needs to be evaluated for
# future sensors like NISAR.
jumpsFile = check_consistency(lowBandIgram, highBandIgram, outputDir)
#########################################################
# estimating the dispersive and non-dispersive components # estimating the dispersive and non-dispersive components
dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, jumpsFile) dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive)
# If median filter is selected, compute the ionosphere phase standard deviation at a mean coherence value defined by the user
if self.dispersive_filter_mask_type == "median_filter":
coh_thres = self.dispersive_filter_coherence_threshold
std_iono = std_iono_mean_coh(f0,fL,fH,coh_thres,rgLooks,azLooks)
else:
std_iono = None
# generating a mask which will help filtering the estimated dispersive and non-dispersive phase # generating a mask which will help filtering the estimated dispersive and non-dispersive phase
getMask(self, maskFile) getMask(self, maskFile,std_iono)
# Calculating the theoretical standard deviation of the estimation based on the coherence of the interferograms # Calculating the theoretical standard deviation of the estimation based on the coherence of the interferograms
theoretical_variance_fromSubBands(self, f0, fL, fH, B, sigmaDispersive, sigmaNonDispersive, azLooks * rgLooks) theoretical_variance_fromSubBands(self, f0, fL, fH, B, sigmaDispersive, sigmaNonDispersive, azLooks * rgLooks)
# low pass filtering the dispersive phase # low pass filtering the dispersive phase
lowPassFilter(outDispersive, sigmaDispersive, maskFile, lowPassFilter(self,outDispersive, sigmaDispersive, maskFile,
self.kernel_x_size, self.kernel_y_size, self.kernel_x_size, self.kernel_y_size,
self.kernel_sigma_x, self.kernel_sigma_y, self.kernel_sigma_x, self.kernel_sigma_y,
iteration = self.dispersive_filter_iterations, iteration = self.dispersive_filter_iterations,
@ -485,7 +476,7 @@ def runDispersive(self):
# low pass filtering the non-dispersive phase # low pass filtering the non-dispersive phase
lowPassFilter(outNonDispersive, sigmaNonDispersive, maskFile, lowPassFilter(self,outNonDispersive, sigmaNonDispersive, maskFile,
self.kernel_x_size, self.kernel_y_size, self.kernel_x_size, self.kernel_y_size,
self.kernel_sigma_x, self.kernel_sigma_y, self.kernel_sigma_x, self.kernel_sigma_y,
iteration = self.dispersive_filter_iterations, iteration = self.dispersive_filter_iterations,
@ -494,21 +485,21 @@ def runDispersive(self):
# Estimating phase unwrapping errors # Estimating phase unwrapping errors
mFile , dFile = unwrapp_error_correction(f0, B, outDispersive+".filt", outNonDispersive+".filt", mFile , dFile = unwrapp_error_correction(f0, B, outDispersive+".filt", outNonDispersive+".filt",
lowBandIgram, highBandIgram, jumpsFile) lowBandIgram, highBandIgram)
# re-estimate the dispersive and non-dispersive phase components by taking into account the unwrapping errors # re-estimate the dispersive and non-dispersive phase components by taking into account the unwrapping errors
outDispersive = outDispersive + ".unwCor" outDispersive = outDispersive + ".unwCor"
outNonDispersive = outNonDispersive + ".unwCor" outNonDispersive = outNonDispersive + ".unwCor"
dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, jumpsFile, m=mFile , d=dFile) dispersive_nonDispersive(lowBandIgram, highBandIgram, f0, fL, fH, outDispersive, outNonDispersive, m=mFile , d=dFile)
# low pass filtering the new estimations # low pass filtering the new estimations
lowPassFilter(outDispersive, sigmaDispersive, maskFile, lowPassFilter(self,outDispersive, sigmaDispersive, maskFile,
self.kernel_x_size, self.kernel_y_size, self.kernel_x_size, self.kernel_y_size,
self.kernel_sigma_x, self.kernel_sigma_y, self.kernel_sigma_x, self.kernel_sigma_y,
iteration = self.dispersive_filter_iterations, iteration = self.dispersive_filter_iterations,
theta = self.kernel_rotation) theta = self.kernel_rotation)
lowPassFilter(outNonDispersive, sigmaNonDispersive, maskFile, lowPassFilter(self,outNonDispersive, sigmaNonDispersive, maskFile,
self.kernel_x_size, self.kernel_y_size, self.kernel_x_size, self.kernel_y_size,
self.kernel_sigma_x, self.kernel_sigma_y, self.kernel_sigma_x, self.kernel_sigma_y,
iteration = self.dispersive_filter_iterations, iteration = self.dispersive_filter_iterations,

View File

@ -24,6 +24,8 @@ def runESD(self, debugPlot=True):
extraOffset = self.extraESDCycles * np.pi * 2 extraOffset = self.extraESDCycles * np.pi * 2
val = np.array([])
for swath in swathList: for swath in swathList:
if self._insar.numberOfCommonBursts[swath-1] < 2: if self._insar.numberOfCommonBursts[swath-1] < 2:
@ -53,7 +55,6 @@ def runESD(self, debugPlot=True):
os.remove(ff) os.remove(ff)
val = []
lineCount = 0 lineCount = 0
for ii in range(minBurst, maxBurst): for ii in range(minBurst, maxBurst):
intname = os.path.join(esddir, 'overlap_IW%d_%02d.%dalks_%drlks.int'%(swath,ii+1, alks,rlks)) intname = os.path.join(esddir, 'overlap_IW%d_%02d.%dalks_%drlks.int'%(swath,ii+1, alks,rlks))
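
The only change here is where the accumulator lives: val is now created as an empty numpy array once, before the swath loop, instead of being reset inside it, so the double-difference samples collected further down the file (not shown in this hunk) survive across swaths. A toy sketch of the difference; the use of np.hstack for the accumulation is an assumption, since those lines are outside this hunk:

import numpy as np

val = np.array([])                        # initialized once, before looping over swaths
for swath_samples in ([1.0, 2.0], [3.0], [4.0, 5.0]):   # stand-ins for per-swath ESD phases
    # resetting val here (the old behaviour) would discard the earlier swaths
    val = np.hstack((val, swath_samples))

print(val.size)                           # 5 -- samples from all swaths are kept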

View File

@ -470,7 +470,7 @@ def mmapFromISCE(fname, logger=None):
isceFile=False isceFile=False
dataName = fname dataName = fname
except: except:
raise Exception('Input file: {0} should either be an ISCE image / GDAL image. Appears to be neither') raise Exception('Input file: {0} should either be an ISCE image / GDAL image. Appears to be neither'.format(fname))
if logger is not None: if logger is not None:
logger.debug('Creating readonly ISCE mmap with \n' + logger.debug('Creating readonly ISCE mmap with \n' +

View File

@ -45,10 +45,6 @@ ellipsoid oblate ellipsoid of revolution (e.g, WGS84) with all the
See mainpage.txt for a complete dump of geo's philosophy-- otherwise, See mainpage.txt for a complete dump of geo's philosophy-- otherwise,
use the docstrings. use the docstrings.
""" """
import os
isce_path = os.getenv("ISCE_HOME")
## \namespace geo Vector- and Affine-spaces, on Earth ## \namespace geo Vector- and Affine-spaces, on Earth
__all__ = ['euclid', 'coordinates', 'ellipsoid', 'charts', 'affine', 'motion'] __all__ = ['euclid', 'coordinates', 'ellipsoid', 'charts', 'affine', 'motion']

View File

@ -297,7 +297,7 @@ class Application(Component, StepHelper):
(self._pickleObj, os.path.join(self.pickleLoadDir, name)) (self._pickleObj, os.path.join(self.pickleLoadDir, name))
) )
except IOError: except IOError:
print("Cannot open %s", os.path.join(self.pickleLoadDir, name)) print("Cannot open %s" % (os.path.join(self.pickleLoadDir, name)))
return None return None

View File

@ -32,10 +32,7 @@ from __future__ import print_function
import os import os
import sys import sys
import operator import operator
import logging from isce import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
from iscesys.DictUtils.DictUtils import DictUtils as DU from iscesys.DictUtils.DictUtils import DictUtils as DU
from iscesys.Compatibility import Compatibility from iscesys.Compatibility import Compatibility
Compatibility.checkPythonVersion() Compatibility.checkPythonVersion()

View File

@ -37,8 +37,7 @@ import isce
import zipfile import zipfile
import os import os
import sys import sys
import logging from isce import logging
import logging.config
from iscesys.Component.Component import Component from iscesys.Component.Component import Component
import shutil import shutil
from urllib import request from urllib import request
@ -325,8 +324,4 @@ class DataRetriever(Component):
# logger not defined until baseclass is called # logger not defined until baseclass is called
if not self.logger: if not self.logger:
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf')
)
self.logger = logging.getLogger('isce.iscesys.DataRetriever') self.logger = logging.getLogger('isce.iscesys.DataRetriever')

View File

@ -81,7 +81,7 @@ class DictUtils:
spare = [] spare = []
# dict1 is the one to update # dict1 is the one to update
for k2,v2 in dict2.items(): for k2,v2 in dict(dict2).items():
if DictUtils.keyIsIn(k2,dict1): if DictUtils.keyIsIn(k2,dict1):
if isinstance(v2,dict):#if is a dict keep going down the node if isinstance(v2,dict):#if is a dict keep going down the node
DictUtils.updateDictionary(dict1[k2],v2,replace,spare) DictUtils.updateDictionary(dict1[k2],v2,replace,spare)
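
Wrapping dict2 in dict(...) makes the loop iterate over a snapshot, which is the standard fix when the loop body may add or remove keys of the dictionary being walked; iterating over the live dict in that situation raises "RuntimeError: dictionary changed size during iteration". A minimal standalone illustration:

d = {'a': 1, 'b': 2, 'c': 3}

# Safe: the loop sees a snapshot, the mutation targets the live dict.
for k, v in dict(d).items():
    if v % 2 == 0:
        del d[k]

print(d)   # {'a': 1, 'c': 3}
# The same loop written as "for k, v in d.items():" would raise
# RuntimeError: dictionary changed size during iteration.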

View File

@ -250,7 +250,7 @@ int takeLookscpx(DataAccessor *IAIn, DataAccessor* IAout, int ld, int la)
for(int j = 0; j < nfull; j++) for(int j = 0; j < nfull; j++)
{ {
bdbl[j] += ain[j]; bdbl[j] += complex<double>(ain[j].real(), ain[j].imag());
} }
} }

View File

@ -29,10 +29,8 @@
import os
import logging import logging
import math import math
import logging.config
from iscesys.Compatibility import Compatibility from iscesys.Compatibility import Compatibility
@ -40,9 +38,6 @@ from isceobj.Planet import Planet
from isceobj import Constants as CN from isceobj import Constants as CN
from iscesys.Component.Component import Component, Port from iscesys.Component.Component import Component, Port
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate', RANGE_SAMPLING_RATE = Component.Parameter('rangeSamplingRate',
public_name='range sampling rate', public_name='range sampling rate',
type=float, type=float,

View File

@ -43,8 +43,7 @@ import os
import sys import sys
import math import math
import urllib.request, urllib.parse, urllib.error import urllib.request, urllib.parse, urllib.error
import logging from isce import logging
import logging.config
from iscesys.Component.Component import Component from iscesys.Component.Component import Component
import xml.etree.ElementTree as ET import xml.etree.ElementTree as ET
@ -1013,10 +1012,6 @@ class DemStitcher(Component):
# logger not defined until baseclass is called # logger not defined until baseclass is called
if not self.logger: if not self.logger:
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf')
)
self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcher') self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcher')
url = property(getUrl,setUrl) url = property(getUrl,setUrl)

View File

@ -39,8 +39,7 @@ from ctypes import cdll
import os import os
import sys import sys
import urllib.request, urllib.error, urllib.parse import urllib.request, urllib.error, urllib.parse
import logging from isce import logging
import logging.config
from iscesys.Component.Component import Component from iscesys.Component.Component import Component
from contrib.demUtils.DemStitcher import DemStitcher as DS from contrib.demUtils.DemStitcher import DemStitcher as DS
#Parameters definitions #Parameters definitions
@ -291,7 +290,4 @@ class DemStitcher(DS):
#it's /srtm/version2_1/SRTM(1,3) #it's /srtm/version2_1/SRTM(1,3)
self._remove = ['.jpg','.xml'] self._remove = ['.jpg','.xml']
if not self.logger: if not self.logger:
logging.config.fileConfig(
os.environ['ISCE_HOME'] + '/library/applications/logging.conf'
)
self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcherV3') self.logger = logging.getLogger('isce.contrib.demUtils.DemStitcherV3')

View File

@ -39,9 +39,8 @@ from ctypes import cdll
import numpy as np import numpy as np
import os import os
import sys import sys
import logging from isce import logging
import math import math
import logging.config
import urllib.request, urllib.parse, urllib.error import urllib.request, urllib.parse, urllib.error
from iscesys.Component.Component import Component from iscesys.Component.Component import Component
from contrib.demUtils.DemStitcher import DemStitcher from contrib.demUtils.DemStitcher import DemStitcher
@ -315,9 +314,6 @@ class SWBDStitcher(DemStitcher):
#it's /srtm/version2_1/SRTM(1,3) #it's /srtm/version2_1/SRTM(1,3)
self._remove = ['.jpg','.xml'] self._remove = ['.jpg','.xml']
if not self.logger: if not self.logger:
logging.config.fileConfig(
os.environ['ISCE_HOME'] + '/library/applications/logging.conf'
)
self.logger = logging.getLogger('isce.contrib.demUtils.SWBDStitcher') self.logger = logging.getLogger('isce.contrib.demUtils.SWBDStitcher')
self.parameter_list = self.parameter_list + super(DemStitcher,self).parameter_list self.parameter_list = self.parameter_list + super(DemStitcher,self).parameter_list

View File

@ -35,8 +35,7 @@ import sys
import math import math
from html.parser import HTMLParser from html.parser import HTMLParser
import urllib.request, urllib.parse, urllib.error import urllib.request, urllib.parse, urllib.error
import logging from isce import logging
import logging.config
from iscesys.Component.Component import Component from iscesys.Component.Component import Component
import zipfile import zipfile
import os import os
@ -979,10 +978,6 @@ class MaskStitcher(Component):
# logger not defined until baseclass is called # logger not defined until baseclass is called
if not self.logger: if not self.logger:
logging.config.fileConfig(
os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf')
)
self.logger = logging.getLogger('isce.contrib.demUtils.MaskStitcher') self.logger = logging.getLogger('isce.contrib.demUtils.MaskStitcher')
utl = property(getUrl,setUrl) utl = property(getUrl,setUrl)

View File

@ -32,10 +32,7 @@
import os import os
import math import math
import logging from isce import logging
import logging.config
logging.config.fileConfig(os.path.join(os.environ['ISCE_HOME'], 'defaults',
'logging', 'logging.conf'))
import isce import isce
from iscesys.Component.FactoryInit import FactoryInit from iscesys.Component.FactoryInit import FactoryInit

View File

@ -35,7 +35,7 @@ import os
Import('envmdx') Import('envmdx')
envmdx.Append( CCFLAGS=['-DSUN','-DIO64','-I'+envmdx['MOTIFINCPATH'],'-I'+envmdx['X11INCPATH']] ) envmdx.Append( CCFLAGS=['-DSUN','-DIO64','-I'+envmdx['MOTIFINCPATH'],'-I'+envmdx['X11INCPATH']] )
envmdx.PrependUnique( LIBPATH=[envmdx['MOTIFLIBPATH'],envmdx['X11LIBPATH']] ) envmdx.PrependUnique( LIBPATH=[envmdx['MOTIFLIBPATH'],envmdx['X11LIBPATH']] )
envmdx.Append( FORTRANFLAGS=['-DSUN','-DIO64','-DGFORTRAN'] ) envmdx.Append( FORTRANFLAGS=['-DSUN','-DIO64'] )
listFiles = ['graphx_mdx.c','rdf_reader_subs.f','mdx_subs.F'] listFiles = ['graphx_mdx.c','rdf_reader_subs.f','mdx_subs.F']
build = envmdx['PRJ_LIB_DIR'] build = envmdx['PRJ_LIB_DIR']

View File

@ -74,12 +74,6 @@ c*****************************************************************
integer i_arg integer i_arg
integer i_inarg integer i_inarg
integer iargc
#ifdef GFORTRAN
c external iargc
#else
external iargc
#endif
integer rdflen integer rdflen
external rdflen external rdflen
@ -110,7 +104,7 @@ c external iargc
equivalence(b_data,r_data) equivalence(b_data,r_data)
a_cmd = '-V' a_cmd = '-V'
i_inarg = iargc() i_inarg = command_argument_count()
if (i_inarg .eq. 0) then if (i_inarg .eq. 0) then
write(6,*) ' ' write(6,*) ' '
write(6,'(1x,a,a18,a)' ) ' << mdx Version ',version_mdx(), ' >> ' write(6,'(1x,a,a18,a)' ) ' << mdx Version ',version_mdx(), ' >> '

View File

@ -4,52 +4,80 @@
# Copyright 2016 # Copyright 2016
# #
import numpy as np
import argparse
import os import os
import isce import argparse
import isceobj import numpy as np
from scipy import ndimage
import matplotlib.pyplot as plt
import gdal import gdal
from gdalconst import GA_ReadOnly from gdalconst import GA_ReadOnly
from scipy import ndimage
# suppress the DEBUG message
import logging
mpl_logger = logging.getLogger('matplotlib')
mpl_logger.setLevel(logging.WARNING)
import isce
import isceobj
from isceobj.Util.ImageUtil import ImageLib as IML
GDAL2NUMPY_DATATYPE = { GDAL2NUMPY_DATATYPE = {
1 : np.uint8,
1 : np.uint8, 2 : np.uint16,
2 : np.uint16, 3 : np.int16,
3 : np.int16, 4 : np.uint32,
4 : np.uint32, 5 : np.int32,
5 : np.int32, 6 : np.float32,
6 : np.float32, 7 : np.float64,
7 : np.float64, 10: np.complex64,
10: np.complex64, 11: np.complex128,
11: np.complex128,
} }
EXAMPLE = '''example:
MaskAndFilter.py -d offset.bip -s offset_snr.bip
MaskAndFilter.py -d offset.bip -s offset_snr.bip --plot
'''
def createParser(): def createParser():
''' '''
Command line parser. Command line parser.
''' '''
parser = argparse.ArgumentParser( description='filters the densOffset, oversamples it and adds back to the geometry offset') parser = argparse.ArgumentParser(description='Mask and filter the densOffset',
formatter_class=argparse.RawTextHelpFormatter,
epilog=EXAMPLE)
parser.add_argument('-d', '--dense_offset', dest='denseOffset', type=str, required=True, parser.add_argument('-d', '--dense_offset', dest='denseOffset', type=str, required=True,
help='The dense offsets file obtained from cross correlation or any other approach') help='The dense offsets file obtained from cross correlation or any other approach')
parser.add_argument('-s', '--snr', dest='snr', type=str, required=True, parser.add_argument('-s', '--snr', dest='snr', type=str, required=True,
help='The SNR of the dense offsets obtained from cross correlation or any other approach') help='The SNR of the dense offsets obtained from cross correlation or any other approach')
parser.add_argument('-n', '--filter_size', dest='filterSize', type=int, default=8, parser.add_argument('-n', '--filter_size', dest='filterSize', type=int, default=8,
help='The size of the median filter') help='Size of the median filter (default: %(default)s).')
parser.add_argument('-t', '--snr_threshold', dest='snrThreshold', type=float, default=5, parser.add_argument('-t', '--snr_threshold', dest='snrThreshold', type=float, default=5,
help='The snr threshold used to mask the offset') help='Minimum SNR used to mask the offset (default: %(default)s).')
# output
parser.add_argument('-A', '--output_azimuth_offset', dest='outAzimuth', type=str, default='filtAzimuth.off', parser.add_argument('-A', '--output_azimuth_offset', dest='outAzimuth', type=str, default='filtAzimuth.off',
help='The azimuth offsets after rubber sheeting') help='File name of the azimuth offsets after rubber sheeting (default: %(default)s).')
parser.add_argument('-R', '--output_range_offset', dest='outRange', type=str, default='filtRange.off', parser.add_argument('-R', '--output_range_offset', dest='outRange', type=str, default='filtRange.off',
help='The range offsets after rubber sheeting') help='File name of the range offsets after rubber sheeting (default: %(default)s).')
parser.add_argument('-o', '--output_directory', dest='outDir', type=str, default='./', parser.add_argument('-o', '--output_directory', dest='outDir', type=str, default='./',
help='Output directory') help='Output directory (default: %(default)s).')
parser.add_argument('-p', '--plot', dest='plot', action='store_true', default=False,
help='plot the offsets before and after masking and filtering') # plot
plot = parser.add_argument_group('plot')
plot.add_argument('-p', '--plot', dest='plot', action='store_true', default=False,
help='plot the offsets before and after masking and filtering')
plot.add_argument('-v', dest='vlim', nargs=2, type=float, default=(-0.05, 0.05),
help='display range for offset (default: %(default)s).')
plot.add_argument('--v-snr', dest='vlim_snr', nargs=2, type=float, default=(0, 100),
help='display range for offset SNR (default: %(default)s).')
plot.add_argument('--figsize', dest='figsize', nargs=2, type=float, default=(18, 5),
help='figure size in inch (default: %(default)s).')
plot.add_argument('--save', dest='fig_name', type=str, default=None,
help='save figure as file')
return parser return parser
@ -58,7 +86,7 @@ def cmdLineParse(iargs = None):
return parser.parse_args(args=iargs) return parser.parse_args(args=iargs)
def read(file, processor='ISCE' , bands=None , dataType=None): def read(file, processor='ISCE', bands=None, dataType=None):
''' reader based on GDAL. ''' reader based on GDAL.
Args: Args:
@ -73,10 +101,13 @@ def read(file, processor='ISCE' , bands=None , dataType=None):
Returns: Returns:
* data : A numpy array with dimensions : number_of_bands * length * width * data : A numpy array with dimensions : number_of_bands * length * width
''' '''
# generate ENVI hdr file and fix the file path in xml
file = os.path.abspath(file)
if processor == 'ISCE': if processor == 'ISCE':
cmd = 'isce2gis.py envi -i ' + file img, dataname, metaname = IML.loadImage(file)
os.system(cmd) img.filename = file
img.setAccessMode('READ')
img.renderHdr()
dataset = gdal.Open(file,GA_ReadOnly) dataset = gdal.Open(file,GA_ReadOnly)
@ -97,23 +128,22 @@ def read(file, processor='ISCE' , bands=None , dataType=None):
# Fill the array with the Raster bands # Fill the array with the Raster bands
idx=0 idx=0
for i in bands: for i in bands:
band=dataset.GetRasterBand(i) band=dataset.GetRasterBand(i)
data[idx,:,:] = band.ReadAsArray() data[idx,:,:] = band.ReadAsArray()
idx+=1 idx+=1
dataset = None dataset = None
return data return data
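A hedged usage sketch for the refactored read() helper, following the band convention used later in mask_filter() (band 1 = azimuth, band 2 = range); the file names come from the EXAMPLE block and are placeholders:

```python
az_off = read('offset.bip', bands=[1])[0, :, :]       # azimuth offsets
rg_off = read('offset.bip', bands=[2])[0, :, :]       # range offsets
snr    = read('offset_snr.bip', bands=[1])[0, :, :]   # SNR of the offsets
```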
def write(raster, fileName, nbands, bandType): def write(raster, fileName, nbands, bandType):
############
# Create the file # Create the file
driver = gdal.GetDriverByName( 'ENVI' ) driver = gdal.GetDriverByName( 'ENVI' )
dst_ds = driver.Create(fileName, raster.shape[1], raster.shape[0], nbands, bandType ) dst_ds = driver.Create(fileName, raster.shape[1], raster.shape[0], nbands, bandType )
dst_ds.GetRasterBand(1).WriteArray( raster, 0 ,0 ) dst_ds.GetRasterBand(1).WriteArray( raster, 0 ,0 )
dst_ds = None dst_ds = None
return
def fill(data, invalid=None): def fill(data, invalid=None):
""" """
@ -132,40 +162,46 @@ def fill(data, invalid=None):
if invalid is None: invalid = np.isnan(data) if invalid is None: invalid = np.isnan(data)
ind = ndimage.distance_transform_edt(invalid, ind = ndimage.distance_transform_edt(invalid,
return_distances=False, return_distances=False,
return_indices=True) return_indices=True)
return data[tuple(ind)] return data[tuple(ind)]
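fill() relies on scipy.ndimage.distance_transform_edt with return_indices=True: for every invalid pixel it returns the index of the nearest valid pixel, so indexing the data with that index array performs a nearest-neighbour fill. A small self-contained check (not part of the patch):

```python
import numpy as np
from scipy import ndimage

a = np.array([[1.0, np.nan, np.nan, 4.0]])
ind = ndimage.distance_transform_edt(np.isnan(a),
                                     return_distances=False,
                                     return_indices=True)
print(a[tuple(ind)])   # [[1. 1. 4. 4.]] -- each NaN takes its nearest valid value
```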
def mask_filter(inps, band, outName, plot=False):
#masking and Filtering
Offset = read(inps.denseOffset, bands=band)
Offset = Offset[0,:,:]
def mask_filter(inps, band, outName):
"""masking and Filtering"""
# read offset
offset = read(inps.denseOffset, bands=band)
offset = offset[0,:,:]
# read SNR
snr = read(inps.snr, bands=[1]) snr = read(inps.snr, bands=[1])
snr = snr[0,:,:] snr = snr[0,:,:]
snr[np.isnan(snr)] = 0
# Masking the dense offsets based on SNR # mask the offset based on SNR
print ('masking the dense offsets with SNR threshold: ', inps.snrThreshold) print('masking the dense offsets with SNR threshold: {}'.format(inps.snrThreshold))
Offset[snr<inps.snrThreshold] = 0 offset1 = np.array(offset)
offset1[snr < inps.snrThreshold] = np.nan
############ # fill the hole in offset with nearest data
Offset[snr<inps.snrThreshold]=np.nan print('fill the masked out region with nearest data')
Offset = fill(Offset) offset2 = fill(offset1)
############
# Median filtering the masked offsets # median filtering
print ('Filtering with median filter with size : ', inps.filterSize) print('filtering with median filter with size : ', inps.filterSize)
Offset = ndimage.median_filter(Offset, size=inps.filterSize) offset3 = ndimage.median_filter(offset2, size=inps.filterSize)
length, width = Offset.shape length, width = offset3.shape
# writing the masked and filtered offsets to a file # write data to file
print ('writing masked and filtered offsets to: ', outName) print('writing masked and filtered offsets to: ', outName)
write(Offset, outName, 1, 6) write(offset3, outName, 1, 6)
# write the xml file # write the xml/vrt/hdr file
img = isceobj.createImage() img = isceobj.createImage()
img.setFilename(outName) img.setFilename(outName)
img.setWidth(width) img.setWidth(width)
img.setLength(length)
img.setAccessMode('READ') img.setAccessMode('READ')
img.bands = 1 img.bands = 1
img.dataType = 'FLOAT' img.dataType = 'FLOAT'
@ -175,103 +211,72 @@ def mask_filter(inps, band, outName, plot=False):
img.renderVRT() img.renderVRT()
#img.finalizeImage() #img.finalizeImage()
################################ return [snr, offset, offset1, offset2, offset3]
if plot:
import matplotlib.pyplot as plt
fig = plt.figure()
ax=fig.add_subplot(1,2,1)
# cax=ax.imshow(azOffset[800:,:], vmin=-2, vmax=4)
cax=ax.imshow(Offset, vmin=-2, vmax=4)
ax.set_title('''Offset''')
ax=fig.add_subplot(1,2,2)
#ax.imshow(azOffset_filt[800:,:], vmin=-2, vmax=4)
ax.imshow(Offset_filt, vmin=-2, vmax=4)
ax.set_title('''Offset filt''')
plt.show()
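For reference, the mask → fill → median-filter chain that the rewritten mask_filter() applies can be reproduced with plain numpy/scipy; the defaults below mirror the parser (snr_threshold=5, filter_size=8). This is a sketch, not the script itself:

```python
import numpy as np
from scipy import ndimage

def mask_fill_filter(offset, snr, snr_threshold=5, filter_size=8):
    snr = np.nan_to_num(snr)              # same effect as snr[np.isnan(snr)] = 0
    masked = np.array(offset)
    masked[snr < snr_threshold] = np.nan  # mask out low-SNR offsets
    ind = ndimage.distance_transform_edt(np.isnan(masked),
                                         return_distances=False,
                                         return_indices=True)
    filled = masked[tuple(ind)]           # nearest-neighbour fill of the holes
    return ndimage.median_filter(filled, size=filter_size)
```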
def getShape(file): def plot_mask_and_filtering(az_list, rg_list, inps=None):
dataset = gdal.Open(file,GA_ReadOnly)
return dataset.RasterYSize, dataset.RasterXSize
def resampleOffset(maskedFiltOffset, geometryOffset, resampledOffset, outName): fig, axs = plt.subplots(nrows=2, ncols=5, figsize=inps.figsize, sharex=True, sharey=True)
titles = ['SNR', 'offset', 'offset (mask)', 'offset (mask/fill)', 'offset (mask/fill/filter)']
length, width = getShape(geometryOffset)
print('oversampling the filtered and masked offsets to the width and length:', width, ' ', length )
cmd = 'gdal_translate -of ENVI -outsize ' + str(width) + ' ' + str(length) + ' ' + maskedFiltOffset + ' ' + resampledOffset
os.system(cmd)
img = isceobj.createImage() # plot SNR
img.setFilename(resampledOffset) im0 = axs[0,0].imshow(az_list[0], vmin=inps.vlim_snr[0], vmax=inps.vlim_snr[1], cmap='RdBu')
img.setWidth(width) im0 = axs[1,0].imshow(rg_list[0], vmin=inps.vlim_snr[0], vmax=inps.vlim_snr[1], cmap='RdBu')
img.setAccessMode('READ') axs[0,0].set_title('SNR', fontsize=12)
img.bands = 1
img.dataType = 'FLOAT'
img.scheme = 'BIP'
#img.createImage()
img.renderHdr()
img.renderVRT()
#img.finalizeImage()
print ('Adding the dense offsets to the geometry offsets. Output: ', outName) # label
cmd = "imageMath.py -e='a+b' -o " + outName + " -t float --a=" + geometryOffset + " --b=" + resampledOffset axs[0,0].set_ylabel('azimuth', fontsize=12)
os.system(cmd) axs[1,0].set_ylabel('range', fontsize=12)
# plot offset
for i in range(1,len(az_list)):
im1 = axs[0,i].imshow(az_list[i], vmin=inps.vlim[0], vmax=inps.vlim[1], cmap='jet')
im1 = axs[1,i].imshow(rg_list[i], vmin=inps.vlim[0], vmax=inps.vlim[1], cmap='jet')
axs[0,i].set_title(titles[i], fontsize=12)
fig.tight_layout()
# colorbar
fig.subplots_adjust(bottom=0.15)
cax0 = fig.add_axes([0.09, 0.1, 0.08, 0.015])
cbar0 = plt.colorbar(im0, cax=cax0, orientation='horizontal')
cax0.yaxis.set_ticks_position('left')
#fig.subplots_adjust(right=0.93)
cax1 = fig.add_axes([0.57, 0.1, 0.08, 0.015])
cbar1 = plt.colorbar(im1, cax=cax1, orientation='horizontal')
cbar1.set_label('pixel', fontsize=12)
# save figure to file
if inps.fig_name is not None:
inps.fig_name = os.path.abspath(inps.fig_name)
print('save figure to file {}'.format(inps.fig_name))
plt.savefig(inps.fig_name, bbox_inches='tight', transparent=True, dpi=300)
plt.show()
return
def main(iargs=None): def main(iargs=None):
inps = cmdLineParse(iargs) inps = cmdLineParse(iargs)
#######################
# working on the azimuth offsets
#######################
#cmd = 'isce2gis.py envi -i ' + inps.geometryAzimuthOffset
#os.system(cmd)
if not os.path.exists(inps.outDir):
os.makedirs(inps.outDir)
####################### if not os.path.exists(inps.outDir):
# masking the dense offsets based on SNR and median filter the masked offsets os.makedirs(inps.outDir)
maskedFiltOffset = os.path.join(inps.outDir, inps.outAzimuth)
mask_filter(inps, band=[1], outName = maskedFiltOffset, plot=inps.plot)
cmd = 'isce2gis.py envi -i ' + maskedFiltOffset #######################
os.system(cmd) # masking the dense offsets based on SNR and median filter the masked offsets
#######################
# resampling the masked and filtered dense offsets to the same
# grid size of the geometry offsets and adding back to the
# geometry offsets
# outAzimuth = os.path.join(inps.outDir, inps.outAzimuth)
# resampledDenseOffset = os.path.join(inps.outDir, 'filtAzOff_resampled.bil') # azimuth offsets
# resampleOffset(maskedFiltOffset, inps.geometryAzimuthOffset, resampledDenseOffset, outAzimuth) inps.outAzimuth = os.path.join(inps.outDir, inps.outAzimuth)
az_list = mask_filter(inps, band=[1], outName=inps.outAzimuth)
#######################
#######################
# working on the range offsets
#######################
#cmd = 'isce2gis.py envi -i ' + inps.geometryRangeOffset
#os.system(cmd)
if not os.path.exists(inps.outDir):
os.makedirs(inps.outDir)
#######################
# masking the dense offsets based on SNR and median filter the masked offsets
maskedFiltOffset = os.path.join(inps.outDir, inps.outRange) # range offsets
mask_filter(inps, band=[2], outName = maskedFiltOffset, plot=inps.plot) inps.outRange = os.path.join(inps.outDir, inps.outRange)
rg_list = mask_filter(inps, band=[2], outName=inps.outRange)
cmd = 'isce2gis.py envi -i ' + maskedFiltOffset # plot result
os.system(cmd) if inps.plot:
####################### plot_mask_and_filtering(az_list, rg_list, inps)
# resampling the masked and filtered dense offsets to the same grid size of the geometry offsets return
#outRange = os.path.join(inps.outDir, inps.outRange)
#resampledDenseOffset = os.path.join(inps.outDir, 'filtRngOff_resampled.bil')
#resampleOffset(maskedFiltOffset, inps.geometryRangeOffset, resampledDenseOffset, outRange)
if __name__ == '__main__': if __name__ == '__main__':

View File

@ -29,7 +29,6 @@ d) fix the path of the file in the xml file of the DEM by using fixImageXml.py.
mkdir DEM; cd DEM mkdir DEM; cd DEM
dem.py -a stitch -b -37 -31 -72 -69 -r -s 1 -c dem.py -a stitch -b -37 -31 -72 -69 -r -s 1 -c
rm demLat*.dem demLat*.dem.xml demLat*.dem.vrt rm demLat*.dem demLat*.dem.xml demLat*.dem.vrt
fixImageXml.py -f -i demLat*.dem.wgs84
cd .. cd ..
``` ```

View File

@ -654,33 +654,40 @@ class workflow(object):
############################## ##############################
def baselinePair(baselineDir, master, slave): def baselinePair(baselineDir, master, slave,doBaselines=True):
try: if doBaselines: # open files to calculate baselines
mdb = shelve.open( os.path.join(master, 'raw'), flag='r') try:
sdb = shelve.open( os.path.join(slave, 'raw'), flag='r') mdb = shelve.open( os.path.join(master, 'raw'), flag='r')
except: sdb = shelve.open( os.path.join(slave, 'raw'), flag='r')
mdb = shelve.open( os.path.join(master, 'data'), flag='r') except:
sdb = shelve.open( os.path.join(slave, 'data'), flag='r') mdb = shelve.open( os.path.join(master, 'data'), flag='r')
sdb = shelve.open( os.path.join(slave, 'data'), flag='r')
mFrame = mdb['frame'] mFrame = mdb['frame']
sFrame = sdb['frame'] sFrame = sdb['frame']
bObj = Baseline() bObj = Baseline()
bObj.configure() bObj.configure()
bObj.wireInputPort(name='masterFrame', object=mFrame) bObj.wireInputPort(name='masterFrame', object=mFrame)
bObj.wireInputPort(name='slaveFrame', object=sFrame) bObj.wireInputPort(name='slaveFrame', object=sFrame)
bObj.baseline() bObj.baseline() # calculate baseline from orbits
pBaselineBottom = bObj.pBaselineBottom
pBaselineTop = bObj.pBaselineTop
else: # set baselines to zero if not calculated
pBaselineBottom = 0.0
pBaselineTop = 0.0
baselineOutName = os.path.basename(master) + "_" + os.path.basename(slave) + ".txt" baselineOutName = os.path.basename(master) + "_" + os.path.basename(slave) + ".txt"
f = open(os.path.join(baselineDir, baselineOutName) , 'w') f = open(os.path.join(baselineDir, baselineOutName) , 'w')
f.write("PERP_BASELINE_BOTTOM " + str(bObj.pBaselineBottom) + '\n') f.write("PERP_BASELINE_BOTTOM " + str(pBaselineBottom) + '\n')
f.write("PERP_BASELINE_TOP " + str(bObj.pBaselineTop) + '\n') f.write("PERP_BASELINE_TOP " + str(pBaselineTop) + '\n')
f.close() f.close()
print('Baseline at top/bottom: %f %f'%(bObj.pBaselineTop,bObj.pBaselineBottom)) print('Baseline at top/bottom: %f %f'%(pBaselineTop,pBaselineBottom))
return (bObj.pBaselineTop+bObj.pBaselineBottom)/2. return (pBaselineTop+pBaselineBottom)/2.
def baselineStack(inps,stackMaster,acqDates): def baselineStack(inps,stackMaster,acqDates,doBaselines=True):
from collections import OrderedDict from collections import OrderedDict
baselineDir = os.path.join(inps.workDir,'baselines') baselineDir = os.path.join(inps.workDir,'baselines')
if not os.path.exists(baselineDir): if not os.path.exists(baselineDir):
@ -693,7 +700,7 @@ def baselineStack(inps,stackMaster,acqDates):
for slv in acqDates: for slv in acqDates:
if slv != stackMaster: if slv != stackMaster:
slave = os.path.join(inps.slcDir, slv) slave = os.path.join(inps.slcDir, slv)
baselineDict[slv]=baselinePair(baselineDir, master, slave) baselineDict[slv]=baselinePair(baselineDir, master, slave, doBaselines)
t = datetime.datetime.strptime(slv, datefmt) t = datetime.datetime.strptime(slv, datefmt)
timeDict[slv] = t - t0 timeDict[slv] = t - t0
else: else:
@ -702,11 +709,11 @@ def baselineStack(inps,stackMaster,acqDates):
return baselineDict, timeDict return baselineDict, timeDict
def selectPairs(inps,stackMaster, slaveDates, acuisitionDates): def selectPairs(inps,stackMaster, slaveDates, acuisitionDates,doBaselines=True):
baselineDict, timeDict = baselineStack(inps, stackMaster, acuisitionDates)
baselineDict, timeDict = baselineStack(inps, stackMaster, acuisitionDates,doBaselines)
for slave in slaveDates: for slave in slaveDates:
print (slave,' : ' , baselineDict[slave]) print (slave,' : ' , baselineDict[slave])
numDates = len(acuisitionDates) numDates = len(acuisitionDates)
pairs = [] pairs = []
for i in range(numDates-1): for i in range(numDates-1):
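A hedged usage sketch of the new doBaselines switch (directory names are placeholders): with doBaselines=False the shelve files and orbits are skipped and the baseline file simply records zeros.

```python
# baselines/ must already exist; master/slave point at unpacked SLC date folders
bperp = baselinePair('baselines', 'SLC/20190101', 'SLC/20190113', doBaselines=False)
print(bperp)   # 0.0 -- (pBaselineTop + pBaselineBottom)/2 with both forced to zero
```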

View File

@ -32,10 +32,16 @@ def createParser():
'and (0,360) or (-180,180) for longitudes.') 'and (0,360) or (-180,180) for longitudes.')
parser.add_argument('-d','--dem_file', dest='demName', type=str, default=None, parser.add_argument('-d','--dem_file', dest='demName', type=str, default=None,
help='DEM file in geo coordinates, i.e. demLat*.dem.wgs84.') help='DEM file in geo coordinates, i.e. demLat*.dem.wgs84.')
parser.add_argument('-l', '--lat_file', dest='latName', type=str, default=None, parser.add_argument('-l', '--lat_file', dest='latName', type=str, default=None,
help='pixel by pixel lat file in radar coordinate') help='pixel by pixel lat file in radar coordinate')
parser.add_argument('-L', '--lon_file', dest='lonName', type=str, default=None, parser.add_argument('-L', '--lon_file', dest='lonName', type=str, default=None,
help='pixel by pixel lon file in radar coordinate') help='pixel by pixel lon file in radar coordinate')
parser.add_argument('--fill', dest='fillValue', type=int, default=-1, choices={-1,0},
help='fill value for pixels with missing data. Default: -1.\n'
'-1 for water body\n'
' 0 for land')
parser.add_argument('-o', '--output', dest='outfile', type=str, parser.add_argument('-o', '--output', dest='outfile', type=str,
help='output filename of water mask in radar coordinates') help='output filename of water mask in radar coordinates')
return parser return parser
@ -73,7 +79,7 @@ def dem2bbox(dem_file):
return bbox return bbox
def download_waterMask(bbox, dem_file): def download_waterMask(bbox, dem_file, fill_value=-1):
out_dir = os.getcwd() out_dir = os.getcwd()
# update out_dir and/or bbox if dem_file is input # update out_dir and/or bbox if dem_file is input
if dem_file: if dem_file:
@ -86,8 +92,7 @@ def download_waterMask(bbox, dem_file):
#inps.waterBodyGeo = sw.defaultName(inps.bbox) #inps.waterBodyGeo = sw.defaultName(inps.bbox)
sw.outputFile = os.path.join(out_dir, sw.defaultName(bbox)) sw.outputFile = os.path.join(out_dir, sw.defaultName(bbox))
sw._noFilling = False sw._noFilling = False
sw._fillingValue = -1.0 #fill pixels without DEM data with value of -1, same as water body sw._fillingValue = fill_value
#sw._fillingValue = 0.0
sw.stitch(bbox[0:2], bbox[2:]) sw.stitch(bbox[0:2], bbox[2:])
return sw.outputFile return sw.outputFile
@ -106,7 +111,7 @@ def geo2radar(geo_file, rdr_file, lat_file, lon_file):
def main(iargs=None): def main(iargs=None):
inps = cmdLineParse(iargs) inps = cmdLineParse(iargs)
geo_file = download_waterMask(inps.bbox, inps.demName) geo_file = download_waterMask(inps.bbox, inps.demName, inps.fillValue)
if inps.latName and inps.lonName: if inps.latName and inps.lonName:
geo2radar(geo_file, inps.outfile, inps.latName, inps.lonName) geo2radar(geo_file, inps.outfile, inps.latName, inps.lonName)
return return
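A hedged call sketch for the new fill_value plumbing (the bbox is taken from the README example elsewhere in this patch and is a placeholder): -1 keeps the old behaviour of flagging no-data pixels as water, 0 flags them as land.

```python
geo_file = download_waterMask(bbox=[18, 20, -100, -97], dem_file=None, fill_value=0)
```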

View File

@ -1,4 +1,5 @@
#!/usr/bin/env python3 #!/usr/bin/env python3
# modified to work for different UAVSAR stack segments EJF 2019/05/04
import os import os
import glob import glob
@ -14,15 +15,17 @@ def createParser():
Create command line parser. Create command line parser.
''' '''
parser = argparse.ArgumentParser(description='Unzip Alos zip files.') parser = argparse.ArgumentParser(description='Prepare UAVSAR SLC Stack files.')
parser.add_argument('-i', '--input', dest='input', type=str, required=True, parser.add_argument('-i', '--input', dest='input', type=str, required=True,
help='directory which has all dates as directories. Inside each date, zip files are expected.') help='directory which has all dates.')
parser.add_argument('-d', '--dop_file', dest='dopFile', type=str, required=True, parser.add_argument('-d', '--dop_file', dest='dopFile', type=str, required=True,
help='Doppler file for the stack.') help='Doppler file for the stack. Needs to be in directory where command is run.')
parser.add_argument('-o', '--output', dest='output', type=str, required=True, parser.add_argument('-o', '--output', dest='output', type=str, required=True,
help='output directory which will be used for unpacking.') help='output directory which will be used for unpacking.')
parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;' parser.add_argument('-s', '--segment', dest='segment', type=str, default='1',
, help='text command to be added to the beginning of each line of the run files. Example : source ~/.bash_profile;') help='segment of the UAVSAR stack to prepare. For "s2" use "2", etc. Default is "1" ')
parser.add_argument('-t', '--text_cmd', dest='text_cmd', type=str, default='source ~/.bash_profile;',
help='text command to be added to the beginning of each line of the run files. Default: source ~/.bash_profile;')
return parser return parser
@ -64,14 +67,13 @@ def main(iargs=None):
inps = cmdLineParse(iargs) inps = cmdLineParse(iargs)
outputDir = os.path.abspath(inps.output) outputDir = os.path.abspath(inps.output)
run_unPack = 'run_unPackAlos'
####################################### #######################################
slc_files = glob.glob(os.path.join(inps.input, '*_s5_1x1.slc')) slc_files = glob.glob(os.path.join(inps.input, '*_s'+inps.segment+'_1x1.slc'))
for file in slc_files: for file in slc_files:
imgDate = get_Date(file) imgDate = get_Date(file)
print (imgDate) print (imgDate)
annFile = file.replace('_s5_1x1.slc','')+'.ann' annFile = file.replace('_s'+inps.segment+'_1x1.slc','')+'.ann'
print (annFile) print (annFile)
imgDir = os.path.join(outputDir,imgDate) imgDir = os.path.join(outputDir,imgDate)
if not os.path.exists(imgDir): if not os.path.exists(imgDir):
@ -81,7 +83,8 @@ def main(iargs=None):
print (cmd) print (cmd)
os.system(cmd) os.system(cmd)
cmd = 'mv ' + file + ' ' + imgDir slcFile = os.path.join(imgDir, imgDate+'.slc')
cmd = 'mv ' + file + ' ' + slcFile
print(cmd) print(cmd)
os.system(cmd) os.system(cmd)
@ -90,7 +93,6 @@ def main(iargs=None):
os.system(cmd) os.system(cmd)
shelveFile = os.path.join(imgDir, 'data') shelveFile = os.path.join(imgDir, 'data')
slcFile = os.path.join(imgDir, os.path.basename(file))
write_xml(shelveFile, slcFile) write_xml(shelveFile, slcFile)
if __name__ == '__main__': if __name__ == '__main__':
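With the new --segment option the SLC glob pattern is built from the segment number; for example, --segment 2 picks up the s2 files. A sketch of the pattern (the input directory is a placeholder):

```python
import glob, os

segment = '2'
slc_files = glob.glob(os.path.join('/data/UAVSAR', '*_s' + segment + '_1x1.slc'))
ann_files = [f.replace('_s' + segment + '_1x1.slc', '') + '.ann' for f in slc_files]
```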

View File

@ -325,7 +325,10 @@ def main(iargs=None):
if not os.path.exists(runDir): if not os.path.exists(runDir):
os.makedirs(runDir) os.makedirs(runDir)
pairs = selectPairs(inps,stackMasterDate, slaveDates, acquisitionDates) if inps.sensor.lower() == 'uavsar_stack': # don't try to calculate baselines for UAVSAR_STACK data
pairs = selectPairs(inps,stackMasterDate, slaveDates, acquisitionDates,doBaselines=False)
else:
pairs = selectPairs(inps,stackMasterDate, slaveDates, acquisitionDates,doBaselines=True)
print ('number of pairs: ', len(pairs)) print ('number of pairs: ', len(pairs))
###If only a summary is requested quit after this ###If only a summary is requested quit after this

View File

@ -401,7 +401,8 @@ def runMultilook(in_dir, out_dir, alks, rlks):
return out_dir return out_dir
def runMultilookGdal(in_dir, out_dir, alks, rlks): def runMultilookGdal(in_dir, out_dir, alks, rlks, in_ext='.rdr', out_ext='.rdr',
fbase_list=['hgt', 'incLocal', 'lat', 'lon', 'los', 'shadowMask', 'waterMask']):
print('generate multilooked geometry files with alks={} and rlks={}'.format(alks, rlks)) print('generate multilooked geometry files with alks={} and rlks={}'.format(alks, rlks))
import gdal import gdal
@ -411,10 +412,9 @@ def runMultilookGdal(in_dir, out_dir, alks, rlks):
print('create directory: {}'.format(out_dir)) print('create directory: {}'.format(out_dir))
# multilook files one by one # multilook files one by one
for fbase in ['hgt', 'incLocal', 'lat', 'lon', 'los', 'shadowMask', 'waterMask']: for fbase in fbase_list:
fname = '{}.rdr'.format(fbase) in_file = os.path.join(in_dir, '{}{}'.format(fbase, in_ext))
in_file = os.path.join(in_dir, fname) out_file = os.path.join(out_dir, '{}{}'.format(fbase, out_ext))
out_file = os.path.join(out_dir, fname)
if os.path.isfile(in_file): if os.path.isfile(in_file):
ds = gdal.Open(in_file, gdal.GA_ReadOnly) ds = gdal.Open(in_file, gdal.GA_ReadOnly)
@ -434,8 +434,9 @@ def runMultilookGdal(in_dir, out_dir, alks, rlks):
# copy the full resolution xml/vrt file from ./merged/geom_master to ./geom_master # copy the full resolution xml/vrt file from ./merged/geom_master to ./geom_master
# to facilitate the number of looks extraction # to facilitate the number of looks extraction
# the file path inside .xml file is not, but should, updated # the file path inside .xml file is not, but should, updated
shutil.copy(in_file+'.xml', out_file+'.full.xml') if in_file != out_file+'.full':
shutil.copy(in_file+'.vrt', out_file+'.full.vrt') shutil.copy(in_file+'.xml', out_file+'.full.xml')
shutil.copy(in_file+'.vrt', out_file+'.full.vrt')
return out_dir return out_dir
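The multilooking call itself sits outside the hunks shown here; one common way to do it with the GDAL Python API, consistent with the gdal.Open/ENVI usage above, is gdal.Translate with a reduced raster size. This is an assumption, not the file's verbatim implementation:

```python
import gdal

def multilook_gdal(in_file, out_file, alks, rlks):
    """Average-downsample a geometry file by alks x rlks looks (sketch)."""
    ds = gdal.Open(in_file, gdal.GA_ReadOnly)
    gdal.Translate(out_file, ds,
                   format='ENVI',
                   width=ds.RasterXSize // rlks,    # range looks
                   height=ds.RasterYSize // alks,   # azimuth looks
                   resampleAlg='average')
    ds = None
```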

View File

@ -30,10 +30,12 @@
import logging import logging
import isce
import isceobj
import argparse import argparse
import os import os
import isce
import isceobj
from isceobj.TopsProc.runBurstIfg import computeCoherence
logger = logging.getLogger('isce.tops.runFilter') logger = logging.getLogger('isce.tops.runFilter')
def runFilter(infile, outfile, filterStrength): def runFilter(infile, outfile, filterStrength):
@ -135,7 +137,11 @@ def createParser():
dest='cohfile') dest='cohfile')
parser.add_argument('-s', '--strength', type=float, default=0.5, help='Filter strength', parser.add_argument('-s', '--strength', type=float, default=0.5, help='Filter strength',
dest='filterstrength') dest='filterstrength')
parser.add_argument('--slc1', type=str, help="SLC 1", dest='slc1')
parser.add_argument('--slc2', type=str, help="SLC 2", dest='slc2')
parser.add_argument('--cc','--complex_coh',type=str, default='fine.cori.full',help='complex coherence file',dest='cpx_cohfile')
parser.add_argument('-r','--range_looks',type=int, default=9, help= 'range looks', dest='numberRangelooks')
parser.add_argument('-z','--azimuth_looks',type=int, default=3, help= 'azimuth looks', dest='numberAzlooks')
return parser return parser
def cmdLineParse(iargs=None): def cmdLineParse(iargs=None):
@ -152,7 +158,31 @@ def main(iargs=None):
runFilter(inps.infile, inps.filtfile, inps.filterstrength) runFilter(inps.infile, inps.filtfile, inps.filterstrength)
estCoherence(inps.filtfile, inps.cohfile) estCoherence(inps.filtfile, inps.cohfile)
if inps.slc1 and inps.slc2:
computeCoherence(inps.slc1,inps.slc2,inps.cpx_cohfile)
from mroipac.looks.Looks import Looks
print('Multilooking {0} ...'.format(inps.cpx_cohfile))
infile=inps.cpx_cohfile
inimg = isceobj.createImage()
inimg.load(infile + '.xml')
alks=inps.numberAzlooks
rlks=inps.numberRangelooks
spl = os.path.splitext(inimg.filename)
#ext = '.{0}alks_{1}rlks'.format(alks, rlks)
#outname = spl[0] + ext + spl[1]
outname=spl[0]
lkObj = Looks()
lkObj.setDownLooks(alks)
lkObj.setAcrossLooks(rlks)
lkObj.setInputImage(inimg)
lkObj.setOutputFilename(outname)
lkObj.looks()
fullfilename=inps.cpx_cohfile
ret=os.system('rm '+fullfilename)
if __name__ == '__main__': if __name__ == '__main__':
main() main()
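A note on the multilook step added above: the full-resolution complex coherence is multilooked and the '.full' suffix dropped before the original file is removed. A small sketch of the naming logic, using the file and look values written into the configs later in this patch (fine.cor.full, 9 range looks, 3 azimuth looks):

```python
import os

cpx_cohfile = 'fine.cor.full'
spl = os.path.splitext(cpx_cohfile)   # ('fine.cor', '.full')
outname = spl[0]                      # Looks writes the multilooked result to 'fine.cor'
alks, rlks = 3, 9                     # parser defaults: -z 3 azimuth, -r 9 range looks
```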

View File

@ -57,7 +57,6 @@ Download of DEM (need to use wgs84 version) using the ISCE DEM download script.
mkdir DEM; cd DEM mkdir DEM; cd DEM
dem.py -a stitch -b 18 20 -100 -97 -r -s 1 -c dem.py -a stitch -b 18 20 -100 -97 -r -s 1 -c
rm demLat*.dem demLat*.dem.xml demLat*.dem.vrt rm demLat*.dem demLat*.dem.xml demLat*.dem.vrt
fixImageXml.py -f -i demLat*.dem.wgs84 #Updating DEMs wgs84 xml to include full path to the DEM
cd .. cd ..
``` ```

View File

@ -205,7 +205,12 @@ class config(object):
self.f.write('filt : ' + self.filtName + '\n') self.f.write('filt : ' + self.filtName + '\n')
self.f.write('coh : ' + self.cohName + '\n') self.f.write('coh : ' + self.cohName + '\n')
self.f.write('strength : ' + self.filtStrength + '\n') self.f.write('strength : ' + self.filtStrength + '\n')
self.f.write('slc1 : ' + self.slc1 + '\n')
self.f.write('slc2 : ' + self.slc2 + '\n')
self.f.write('complex_coh : '+ self.cpxcor + '\n')
self.f.write('range_looks : ' + self.rangeLooks + '\n')
self.f.write('azimuth_looks : ' + self.azimuthLooks + '\n')
def unwrap(self, function): def unwrap(self, function):
self.f.write('###################################'+'\n') self.f.write('###################################'+'\n')
self.f.write(function + '\n') self.f.write(function + '\n')
@ -629,12 +634,16 @@ class run(object):
master = pair[0] master = pair[0]
slave = pair[1] slave = pair[1]
mergedDir = os.path.join(self.work_dir, 'merged/interferograms/' + master + '_' + slave) mergedDir = os.path.join(self.work_dir, 'merged/interferograms/' + master + '_' + slave)
mergedSLCDir = os.path.join(self.work_dir, 'merged/SLC')
configName = os.path.join(self.config_path ,'config_igram_filt_coh_' + master + '_' + slave) configName = os.path.join(self.config_path ,'config_igram_filt_coh_' + master + '_' + slave)
configObj = config(configName) configObj = config(configName)
configObj.configure(self) configObj.configure(self)
configObj.input = os.path.join(mergedDir,'fine.int') configObj.input = os.path.join(mergedDir,'fine.int')
configObj.filtName = os.path.join(mergedDir,'filt_fine.int') configObj.filtName = os.path.join(mergedDir,'filt_fine.int')
configObj.cohName = os.path.join(mergedDir,'filt_fine.cor') configObj.cohName = os.path.join(mergedDir,'filt_fine.cor')
configObj.slc1=os.path.join(mergedSLCDir, '{}/{}.slc.full'.format(master, master))
configObj.slc2=os.path.join(mergedSLCDir, '{}/{}.slc.full'.format(slave, slave))
configObj.cpxcor=os.path.join(mergedDir,'fine.cor.full')
#configObj.filtStrength = str(self.filtStrength) #configObj.filtStrength = str(self.filtStrength)
configObj.FilterAndCoherence('[Function-1]') configObj.FilterAndCoherence('[Function-1]')
configObj.finalize() configObj.finalize()

View File

@ -390,8 +390,7 @@ def main(iargs=None):
fileList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0] + '_%02d.%s'%(x,namePattern[1])) for x in range(minBurst, maxBurst+1)]) fileList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0] + '_%02d.%s'%(x,namePattern[1])) for x in range(minBurst, maxBurst+1)])
mergedir = os.path.dirname(inps.outfile) mergedir = os.path.dirname(inps.outfile)
if not os.path.isdir(mergedir): os.makedirs(mergedir, exist_ok=True)
os.makedirs(mergedir)
suffix = '.full' suffix = '.full'
if (inps.numberRangeLooks == 1) and (inps.numberAzimuthLooks==1): if (inps.numberRangeLooks == 1) and (inps.numberAzimuthLooks==1):

View File

@ -430,11 +430,16 @@ def slcStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe_dict, upd
############################# #############################
i=0 i=0
if not updateStack:
i += 1
runObj = run()
runObj.configure(inps, 'run_' + str(i) + "_unpack_topo_master")
runObj.unpackStackMasterSLC(safe_dict)
runObj.finalize()
i+=1 i+=1
runObj = run() runObj = run()
runObj.configure(inps, 'run_' + str(i) + "_unpack_slc_topo_master") runObj.configure(inps, 'run_' + str(i) + "_unpack_slave_slc")
if not updateStack:
runObj.unpackStackMasterSLC(safe_dict)
runObj.unpackSlavesSLC(stackMasterDate, slaveDates, safe_dict) runObj.unpackSlavesSLC(stackMasterDate, slaveDates, safe_dict)
runObj.finalize() runObj.finalize()
@ -505,12 +510,14 @@ def slcStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe_dict, upd
def correlationStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe_dict, pairs, updateStack): def correlationStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe_dict, pairs, updateStack):
############################# #############################
i = slcStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe_dict, updateStack) i = slcStack(inps, acquisitionDates,stackMasterDate, slaveDates, safe_dict, updateStack)
i+=1 i+=1
runObj = run() runObj = run()
runObj.configure(inps, 'run_' + str(i) + "_merge_master") runObj.configure(inps, 'run_' + str(i) + "_merge_master_slave_slc")
runObj.mergeMaster(stackMasterDate, virtual = 'False') runObj.mergeMaster(stackMasterDate, virtual = 'True')
runObj.mergeSlaveSLC(slaveDates, virtual = 'True')
runObj.finalize() runObj.finalize()
i+=1 i+=1
@ -530,6 +537,13 @@ def interferogramStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe
i = slcStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe_dict, updateStack) i = slcStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe_dict, updateStack)
i+=1
runObj = run()
runObj.configure(inps, 'run_' + str(i) + "_merge_master_slave_slc")
runObj.mergeMaster(stackMasterDate, virtual = 'True')
runObj.mergeSlaveSLC(slaveDates, virtual = 'True')
runObj.finalize()
i+=1 i+=1
runObj = run() runObj = run()
runObj.configure(inps, 'run_' + str(i) + "_merge_burst_igram") runObj.configure(inps, 'run_' + str(i) + "_merge_burst_igram")
@ -548,12 +562,6 @@ def interferogramStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe
runObj.unwrap(pairs) runObj.unwrap(pairs)
runObj.finalize() runObj.finalize()
i+=1
runObj = run()
runObj.configure(inps, 'run_' + str(i) + "_merge_master_slave_slc")
runObj.mergeMaster(stackMasterDate, virtual = 'True')
runObj.mergeSlaveSLC(slaveDates, virtual = 'True')
runObj.finalize()
def offsetStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe_dict, pairs, updateStack): def offsetStack(inps, acquisitionDates, stackMasterDate, slaveDates, safe_dict, pairs, updateStack):

contrib/stack/topsStack/topo.py Normal file → Executable file
View File

@ -10,6 +10,8 @@ import sys
import s1a_isce_utils as ut import s1a_isce_utils as ut
from isceobj.Planet.Planet import Planet from isceobj.Planet.Planet import Planet
from zerodop.topozero import createTopozero from zerodop.topozero import createTopozero
import multiprocessing as mp
def createParser(): def createParser():
parser = argparse.ArgumentParser( description='Generates lat/lon/h and los for each pixel') parser = argparse.ArgumentParser( description='Generates lat/lon/h and los for each pixel')
@ -30,6 +32,49 @@ def cmdLineParse(iargs=None):
return parser.parse_args(args=iargs) return parser.parse_args(args=iargs)
def call_topo(input):
(dirname, demImage, master, ind) = input
burst = master.bursts[ind]
latname = os.path.join(dirname, 'lat_%02d.rdr' % (ind + 1))
lonname = os.path.join(dirname, 'lon_%02d.rdr' % (ind + 1))
hgtname = os.path.join(dirname, 'hgt_%02d.rdr' % (ind + 1))
losname = os.path.join(dirname, 'los_%02d.rdr' % (ind + 1))
maskname = os.path.join(dirname, 'shadowMask_%02d.rdr' % (ind + 1))
incname = os.path.join(dirname, 'incLocal_%02d.rdr' % (ind + 1))
#####Run Topo
planet = Planet(pname='Earth')
topo = createTopozero()
topo.slantRangePixelSpacing = burst.rangePixelSize
topo.prf = 1.0 / burst.azimuthTimeInterval
topo.radarWavelength = burst.radarWavelength
topo.orbit = burst.orbit
topo.width = burst.numberOfSamples
topo.length = burst.numberOfLines
topo.wireInputPort(name='dem', object=demImage)
topo.wireInputPort(name='planet', object=planet)
topo.numberRangeLooks = 1
topo.numberAzimuthLooks = 1
topo.lookSide = -1
topo.sensingStart = burst.sensingStart
topo.rangeFirstSample = burst.startingRange
topo.demInterpolationMethod = 'BIQUINTIC'
topo.latFilename = latname
topo.lonFilename = lonname
topo.heightFilename = hgtname
topo.losFilename = losname
topo.maskFilename = maskname
topo.incFilename = incname
topo.topo()
bbox = [topo.minimumLatitude, topo.maximumLatitude, topo.minimumLongitude, topo.maximumLongitude]
topo = None
return bbox
def main(iargs=None): def main(iargs=None):
inps = cmdLineParse(iargs) inps = cmdLineParse(iargs)
@ -40,6 +85,8 @@ def main(iargs=None):
demImage.load(inps.dem + '.xml') demImage.load(inps.dem + '.xml')
boxes = [] boxes = []
inputs = []
for swath in swathList: for swath in swathList:
master = ut.loadProduct(os.path.join(inps.master , 'IW{0}.xml'.format(swath))) master = ut.loadProduct(os.path.join(inps.master , 'IW{0}.xml'.format(swath)))
@ -51,50 +98,20 @@ def main(iargs=None):
else: else:
os.makedirs(dirname) os.makedirs(dirname)
###For each burst
for ind in range(master.numberOfBursts): for ind in range(master.numberOfBursts):
burst = master.bursts[ind] inputs.append((dirname, demImage, master, ind))
latname = os.path.join(dirname, 'lat_%02d.rdr'%(ind+1)) pool = mp.Pool(mp.cpu_count())
lonname = os.path.join(dirname, 'lon_%02d.rdr'%(ind+1)) results = pool.map(call_topo, inputs)
hgtname = os.path.join(dirname, 'hgt_%02d.rdr'%(ind+1)) pool.close()
losname = os.path.join(dirname, 'los_%02d.rdr'%(ind+1))
maskname = os.path.join(dirname, 'shadowMask_%02d.rdr'%(ind+1))
incname = os.path.join(dirname, 'incLocal_%02d.rdr'%(ind+1))
#####Run Topo
planet = Planet(pname='Earth')
topo = createTopozero()
topo.slantRangePixelSpacing = burst.rangePixelSize
topo.prf = 1.0/burst.azimuthTimeInterval
topo.radarWavelength = burst.radarWavelength
topo.orbit = burst.orbit
topo.width = burst.numberOfSamples
topo.length = burst.numberOfLines
topo.wireInputPort(name='dem', object=demImage)
topo.wireInputPort(name='planet', object=planet)
topo.numberRangeLooks = 1
topo.numberAzimuthLooks = 1
topo.lookSide = -1
topo.sensingStart = burst.sensingStart
topo.rangeFirstSample = burst.startingRange
topo.demInterpolationMethod='BIQUINTIC'
topo.latFilename = latname
topo.lonFilename = lonname
topo.heightFilename = hgtname
topo.losFilename = losname
topo.maskFilename = maskname
topo.incFilename = incname
topo.topo()
bbox = [topo.minimumLatitude, topo.maximumLatitude, topo.minimumLongitude, topo.maximumLongitude] for bbox in results:
boxes.append(bbox) boxes.append(bbox)
topo = None
boxes = np.array(boxes) boxes = np.array(boxes)
bbox = [np.min(boxes[:,0]), np.max(boxes[:,1]), np.min(boxes[:,2]), np.max(boxes[:,3])] bbox = [np.min(boxes[:,0]), np.max(boxes[:,1]), np.min(boxes[:,2]), np.max(boxes[:,3])]
print ('bbox : ',bbox) print ('bbox : ',bbox)
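The per-burst topo runs are now farmed out with multiprocessing.Pool: each burst becomes one tuple in `inputs`, call_topo runs independently in each worker, and the bounding boxes come back in order from pool.map. A minimal, self-contained illustration of the same pattern (not ISCE code):

```python
import multiprocessing as mp

def work(args):
    name, value = args              # stands in for (dirname, demImage, master, ind)
    return (name, value * value)    # stands in for the per-burst bbox

if __name__ == '__main__':
    inputs = [('burst_%02d' % i, i) for i in range(1, 10)]
    with mp.Pool(mp.cpu_count()) as pool:
        results = pool.map(work, inputs)   # result order matches 'inputs'
    print(results)
```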
if __name__ == '__main__': if __name__ == '__main__':