Merge pull request #69 from vbrancat/master

Range rubber sheeting modifications
LT1AB
Heresh Fattahi 2019-10-18 09:13:15 -07:00 committed by GitHub
commit 629f09755b
7 changed files with 708 additions and 50 deletions

View File

@@ -242,14 +242,20 @@ FILTER_STRENGTH = Application.Parameter('filterStrength',
mandatory=False,
doc='')
DO_RUBBERSHEETING = Application.Parameter('doRubbersheeting',
public_name='do rubbersheeting',
############################################## Modified by V.Brancato 10.07.2019
DO_RUBBERSHEETINGAZIMUTH = Application.Parameter('doRubbersheetingAzimuth',
public_name='do rubbersheetingAzimuth',
default=False,
type=bool,
mandatory=False,
doc='')
DO_RUBBERSHEETINGRANGE = Application.Parameter('doRubbersheetingRange',
public_name='do rubbersheetingRange',
default=False,
type=bool,
mandatory=False,
doc='')
#################################################################################
RUBBERSHEET_SNR_THRESHOLD = Application.Parameter('rubberSheetSNRThreshold',
public_name='rubber sheet SNR Threshold',
default = 5.0,
@@ -533,7 +539,8 @@ class _RoiBase(Application, FrameMixin):
GEOCODE_BOX,
REGION_OF_INTEREST,
HEIGHT_RANGE,
DO_RUBBERSHEETING,
DO_RUBBERSHEETINGRANGE, #Modified by V. Brancato 10.07.2019
DO_RUBBERSHEETINGAZIMUTH, #Modified by V. Brancato 10.07.2019
RUBBERSHEET_SNR_THRESHOLD,
RUBBERSHEET_FILTER_SIZE,
DO_DENSEOFFSETS,
@@ -724,7 +731,8 @@ class _RoiBase(Application, FrameMixin):
self.runResampleSlc = StripmapProc.createResampleSlc(self)
self.runRefineSlaveTiming = StripmapProc.createRefineSlaveTiming(self)
self.runDenseOffsets = StripmapProc.createDenseOffsets(self)
self.runRubbersheet = StripmapProc.createRubbersheet(self)
self.runRubbersheetRange = StripmapProc.createRubbersheetRange(self) #Modified by V. Brancato 10.07.2019
self.runRubbersheetAzimuth =StripmapProc.createRubbersheetAzimuth(self) #Modified by V. Brancato 10.07.2019
self.runResampleSubbandSlc = StripmapProc.createResampleSubbandSlc(self)
self.runInterferogram = StripmapProc.createInterferogram(self)
self.runFilter = StripmapProc.createFilter(self)
@@ -774,8 +782,11 @@ class _RoiBase(Application, FrameMixin):
args=('refined',))
self.step('dense_offsets', func=self.runDenseOffsets)
######################################################################## Modified by V. Brancato 10.07.2019
self.step('rubber_sheet_range', func=self.runRubbersheetRange)
self.step('rubber_sheet', func=self.runRubbersheet)
self.step('rubber_sheet_azimuth',func=self.runRubbersheetAzimuth)
#########################################################################
self.step('fine_resample', func=self.runResampleSlc,
args=('fine',))
@@ -853,9 +864,13 @@ class _RoiBase(Application, FrameMixin):
# run dense offsets
self.runDenseOffsets()
############ Modified by V. Brancato 10.07.2019
# adding the azimuth offsets computed from cross correlation to geometry offsets
self.runRubbersheet()
self.runRubbersheetAzimuth()
# adding the range offsets computed from cross correlation to geometry offsets
self.runRubbersheetRange()
####################################################################################
# resampling using rubbersheeted offsets
# which include geometry + constant range + constant azimuth
# + dense azimuth offsets
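
With this change the old single doRubbersheeting switch is split into independent azimuth and range switches. A minimal sketch of how the two new properties might be enabled in a stripmapApp input file, using the public names defined above; the wrapper tags are assumed from the usual stripmapApp XML layout and are not part of this diff:

    <stripmapApp>
      <component name="insar">
        <!-- ... other stripmapApp properties ... -->
        <property name="do rubbersheetingAzimuth">True</property>
        <property name="do rubbersheetingRange">True</property>
      </component>
    </stripmapApp>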

View File

@@ -325,7 +325,7 @@ AZIMUTH_OFFSET_FILENAME = Component.Parameter('azimuthOffsetFilename',
doc='')
# Modified by V. Brancato 10.07.2019
AZIMUTH_RUBBERSHEET_FILENAME = Component.Parameter('azimuthRubbersheetFilename',
public_name='azimuth Rubbersheet Image Name',
default = 'azimuth_sheet.off',
@@ -333,6 +333,13 @@ AZIMUTH_RUBBERSHEET_FILENAME = Component.Parameter('azimuthRubbersheetFilename',
mandatory=False,
doc='')
RANGE_RUBBERSHEET_FILENAME = Component.Parameter('rangeRubbersheetFilename',
public_name='range Rubbersheet Image Name',
default = 'range_sheet.off',
type=str,
mandatory=False,
doc='')
# End of modification
MISREG_FILENAME = Component.Parameter('misregFilename',
public_name='misreg file name',
default='misreg',
@@ -346,7 +353,7 @@ DENSE_OFFSET_FILENAME = Component.Parameter('denseOffsetFilename',
type=str,
mandatory=False,
doc='file name of dense offsets computed from cross correlating two SLC images')
# Modified by V. Brancato 10.07.2019
FILT_AZIMUTH_OFFSET_FILENAME = Component.Parameter('filtAzimuthOffsetFilename',
public_name='filtered azimuth offset filename',
default='filtAzimuth.off',
@@ -354,6 +361,13 @@ FILT_AZIMUTH_OFFSET_FILENAME = Component.Parameter('filtAzimuthOffsetFilename',
mandatory=False,
doc='Filtered azimuth dense offsets')
FILT_RANGE_OFFSET_FILENAME = Component.Parameter('filtRangeOffsetFilename',
public_name='filtered range offset filename',
default='filtRange.off',
type=str,
mandatory=False,
doc='Filtered range dense offsets')
# End of modification
DISPERSIVE_FILENAME = Component.Parameter('dispersiveFilename',
public_name = 'dispersive phase filename',
default='dispersive.bil',
@@ -470,8 +484,10 @@ class StripmapProc(Component, FrameMixin):
LOS_FILENAME,
RANGE_OFFSET_FILENAME,
AZIMUTH_OFFSET_FILENAME,
AZIMUTH_RUBBERSHEET_FILENAME,
FILT_AZIMUTH_OFFSET_FILENAME,
AZIMUTH_RUBBERSHEET_FILENAME, # Added by V. Brancato 10.07.2019
RANGE_RUBBERSHEET_FILENAME, # Added by V. Brancato 10.07.2019
FILT_AZIMUTH_OFFSET_FILENAME, # Added by V. Brancato 10.07.2019
FILT_RANGE_OFFSET_FILENAME, # Added by V. Brancato 10.07.2019
DENSE_OFFSET_FILENAME,
MISREG_FILENAME,
DISPERSIVE_FILENAME,

View File

@@ -1,14 +1,73 @@
#
# Author: Heresh Fattahi, 2017
#
# Modified by V. Brancato (10.2019)
# (Included flattening when rubbersheeting in range is turned on)
import isceobj
import logging
from components.stdproc.stdproc import crossmul
from iscesys.ImageUtil.ImageUtil import ImageUtil as IU
import os
import gdal
import numpy as np
logger = logging.getLogger('isce.insar.runInterferogram')
# Added by V. Brancato 10.09.2019
def write_xml(fileName,width,length,bands,dataType,scheme):
img = isceobj.createImage()
img.setFilename(fileName)
img.setWidth(width)
img.setLength(length)
img.setAccessMode('READ')
img.bands = bands
img.dataType = dataType
img.scheme = scheme
img.renderHdr()
img.renderVRT()
return None
def compute_FlatEarth(self,width,length):
from imageMath import IML
import logging
# If rubbersheeting has been performed add back the range sheet offsets
info = self._insar.loadProduct(self._insar.slaveSlcCropProduct)
radarWavelength = info.getInstrument().getRadarWavelength()
rangePixelSize = info.getInstrument().getRangePixelSize()
fact = 4 * np.pi* rangePixelSize / radarWavelength
cJ = np.complex64(-1j)
# Open the range sheet offset
rngOff = os.path.join(self.insar.offsetsDirname, self.insar.rangeOffsetFilename )
print(rngOff)
if os.path.exists(rngOff):
rng2 = np.memmap(rngOff, dtype=np.float64, mode='r', shape=(length,width))
else:
print('No range offsets provided')
rng2 = np.zeros((length,width))
# Open the interferogram
ifgFilename= os.path.join(self.insar.ifgDirname, self.insar.ifgFilename)
intf = np.memmap(ifgFilename+'.full',dtype=np.complex64,mode='r+',shape=(length,width))
for ll in range(length):
intf[ll,:] *= np.exp(cJ*fact*rng2[ll,:])
del rng2
del intf
return
def multilook(infile, outname=None, alks=5, rlks=15):
'''
Take looks.
@@ -66,8 +125,8 @@ def computeCoherence(slc1name, slc2name, corname, virtual=True):
slc2.finalizeImage()
return
def generateIgram(imageSlc1, imageSlc2, resampName, azLooks, rgLooks):
# Modified by V. Brancato on 10.09.2019 (added self)
def generateIgram(self,imageSlc1, imageSlc2, resampName, azLooks, rgLooks):
objSlc1 = isceobj.createSlcImage()
IU.copyAttributes(imageSlc1, objSlc1)
objSlc1.setAccessMode('read')
@@ -79,7 +138,12 @@ def generateIgram(imageSlc1, imageSlc2, resampName, azLooks, rgLooks):
objSlc2.createImage()
slcWidth = imageSlc1.getWidth()
intWidth = int(slcWidth / rgLooks)
if not self.doRubbersheetingRange:
intWidth = int(slcWidth/rgLooks) # Modified by V. Brancato intWidth = int(slcWidth / rgLooks)
else:
intWidth = int(slcWidth)
lines = min(imageSlc1.getLength(), imageSlc2.getLength())
@@ -93,7 +157,7 @@ def generateIgram(imageSlc1, imageSlc2, resampName, azLooks, rgLooks):
resampInt = resampName
objInt = isceobj.createIntImage()
objInt.setFilename(resampInt)
objInt.setFilename(resampInt+'.full')
objInt.setWidth(intWidth)
imageInt = isceobj.createIntImage()
IU.copyAttributes(objInt, imageInt)
@@ -101,21 +165,41 @@ def generateIgram(imageSlc1, imageSlc2, resampName, azLooks, rgLooks):
objInt.createImage()
objAmp = isceobj.createAmpImage()
objAmp.setFilename(resampAmp)
objAmp.setFilename(resampAmp+'.full')
objAmp.setWidth(intWidth)
imageAmp = isceobj.createAmpImage()
IU.copyAttributes(objAmp, imageAmp)
objAmp.setAccessMode('write')
objAmp.createImage()
objCrossmul = crossmul.createcrossmul()
objCrossmul.width = slcWidth
objCrossmul.length = lines
objCrossmul.LooksDown = azLooks
objCrossmul.LooksAcross = rgLooks
if not self.doRubbersheetingRange:
print('Rubbersheeting in range is off, interferogram is already flattened')
objCrossmul = crossmul.createcrossmul()
objCrossmul.width = slcWidth
objCrossmul.length = lines
objCrossmul.LooksDown = azLooks
objCrossmul.LooksAcross = rgLooks
objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp)
objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp)
else:
# Modified by V. Brancato 10.09.2019 (when range rubber sheeting is on, form the full-resolution interferogram and remove the flat-Earth phase here)
print('Rubbersheeting in range is on, removing flat-Earth phase')
objCrossmul = crossmul.createcrossmul()
objCrossmul.width = slcWidth
objCrossmul.length = lines
objCrossmul.LooksDown = 1
objCrossmul.LooksAcross = 1
objCrossmul.crossmul(objSlc1, objSlc2, objInt, objAmp)
# Remove Flat-Earth component
compute_FlatEarth(self,intWidth,lines)
# Perform Multilook
multilook(resampInt+'.full', outname=resampInt, alks=azLooks, rlks=rgLooks) #takeLooks(objInt,azLooks,rgLooks)
multilook(resampAmp+'.full', outname=resampAmp, alks=azLooks, rlks=rgLooks) #takeLooks(objAmp,azLooks,rgLooks)
#os.system('rm ' + resampInt+'.full* ' + resampAmp + '.full* ')
# End of modification
for obj in [objInt, objAmp, objSlc1, objSlc2]:
obj.finalizeImage()
@@ -142,7 +226,7 @@ def subBandIgram(self, masterSlc, slaveSlc, subBandDir):
interferogramName = os.path.join(ifgDir , self.insar.ifgFilename)
generateIgram(img1, img2, interferogramName, azLooks, rgLooks)
generateIgram(self,img1, img2, interferogramName, azLooks, rgLooks)
return interferogramName
@@ -185,7 +269,7 @@ def runFullBandInterferogram(self):
masterFrame = self._insar.loadProduct( self._insar.masterSlcCropProduct)
masterSlc = masterFrame.getImage().filename
if self.doRubbersheeting:
if (self.doRubbersheetingRange | self.doRubbersheetingAzimuth):
slaveSlc = os.path.join(self._insar.coregDirname, self._insar.fineCoregFilename)
else:
slaveSlc = os.path.join(self._insar.coregDirname, self._insar.refinedCoregFilename)
@@ -212,7 +296,7 @@ def runFullBandInterferogram(self):
interferogramName = os.path.join(ifgDir , self.insar.ifgFilename)
generateIgram(img1, img2, interferogramName, azLooks, rgLooks)
generateIgram(self,img1, img2, interferogramName, azLooks, rgLooks)
###Compute coherence
@@ -221,7 +305,7 @@ def runFullBandInterferogram(self):
multilook(cohname+'.full', outname=cohname, alks=azLooks, rlks=rgLooks)
###Multilook relevant geometry products
##Multilook relevant geometry products
for fname in [self.insar.latFilename, self.insar.lonFilename, self.insar.losFilename]:
inname = os.path.join(self.insar.geometryDirname, fname)
multilook(inname + '.full', outname= inname, alks=azLooks, rlks=rgLooks)
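
For clarity, a minimal numpy sketch (not the PR's code) of the flat-Earth correction that compute_FlatEarth applies to the full-resolution interferogram: each pixel is counter-rotated by 4*pi*rangePixelSize*rangeOffset/wavelength. All shapes and values below are illustrative.

    import numpy as np

    wavelength = 0.236         # radar wavelength in metres (illustrative value)
    range_pixel_size = 2.33    # slant-range pixel spacing in metres (illustrative value)
    fact = 4.0 * np.pi * range_pixel_size / wavelength

    ifg = np.ones((4, 6), dtype=np.complex64)            # stands in for the *.full interferogram
    rng_off = 0.1 * np.ones((4, 6), dtype=np.float64)    # range offsets in pixels

    # remove the flat-Earth component line by line, as compute_FlatEarth does on memmaps
    for line in range(ifg.shape[0]):
        ifg[line, :] *= np.exp(-1j * fact * rng_off[line, :]).astype(np.complex64)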

View File

@@ -23,7 +23,7 @@ def runResampleSlc(self, kind='coarse'):
raise Exception('Unknown operation type {0} in runResampleSlc'.format(kind))
if kind == 'fine':
if not self.doRubbersheeting:
if not (self.doRubbersheetingRange | self.doRubbersheetingAzimuth): # Modified by V. Brancato 10.10.2019
print('Rubber sheeting not requested, skipping resampling ....')
return
@@ -68,11 +68,24 @@ def runResampleSlc(self, kind='coarse'):
#Since the app is based on geometry module we expect pixel-by-pixel offset
#field
offsetsDir = self.insar.offsetsDirname
rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
# Modified by V. Brancato 10.10.2019
#rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
if kind in ['coarse', 'refined']:
azname = os.path.join(offsetsDir, self.insar.azimuthOffsetFilename)
rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
else:
azname = os.path.join(offsetsDir, self.insar.azimuthRubbersheetFilename)
if self.doRubbersheetingRange:
print('Rubbersheeting in range is turned on, taking the cross-correlation offsets')
print('Setting Flattening to False')
rgname = os.path.join(offsetsDir, self.insar.rangeRubbersheetFilename)
flatten=False
else:
print('Rubbersheeting in range is turned off, taking range geometric offsets')
rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
flatten=True
rngImg = isceobj.createImage()
rngImg.load(rgname + '.xml')
@@ -85,8 +98,8 @@ def runResampleSlc(self, kind='coarse'):
width = rngImg.getWidth()
length = rngImg.getLength()
flatten = True
# Modified by V. Brancato 10.10.2019
#flatten = True
rObj.flatten = flatten
rObj.outputWidth = width
rObj.outputLines = length
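
The selection above can be condensed into a small helper (purely illustrative; the rubber-sheet file name follows the default defined earlier in this PR, while the geometric range-offset default name is an assumption):

    import os

    def pick_range_offsets(offsets_dir, do_rubbersheeting_range,
                           range_offset_name='range.off',            # assumed default
                           range_rubbersheet_name='range_sheet.off'):
        """Return (range-offset path, flatten flag) for the 'fine' resampling step."""
        if do_rubbersheeting_range:
            # cross-correlation offsets are used and the flat-Earth phase is removed
            # later, during interferogram formation (compute_FlatEarth above)
            return os.path.join(offsets_dir, range_rubbersheet_name), False
        # geometric offsets only: let resamp_slc flatten as before
        return os.path.join(offsets_dir, range_offset_name), True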

View File

@@ -14,7 +14,8 @@ import shelve
logger = logging.getLogger('isce.insar.runResampleSubbandSlc')
def resampleSlc(masterFrame, slaveFrame, imageSlc2, radarWavelength, coregDir,
# Modified by V. Brancato 10.14.2019 added "self" as input parameter of resampleSLC
def resampleSlc(self,masterFrame, slaveFrame, imageSlc2, radarWavelength, coregDir,
azoffname, rgoffname, azpoly = None, rgpoly = None, misreg=False):
logger.info("Resampling slave SLC")
@@ -56,8 +57,15 @@ def resampleSlc(masterFrame, slaveFrame, imageSlc2, radarWavelength, coregDir,
width = rngImg.getWidth()
length = rngImg.getLength()
# Modified by V. Brancato on 10.14.2019 (if rubber sheeting in range is turned on, the interferogram is flattened during interferogram formation rather than during resampling)
if not self.doRubbersheetingRange:
print('Rubber sheeting in range is turned off, flattening the interferogram during resampling')
flatten = True
else:
print('Rubber sheeting in range is turned on, flattening the interferogram during interferogram formation')
flatten=False
# end of Modification
flatten = True
rObj.flatten = flatten
rObj.outputWidth = width
rObj.outputLines = length
@@ -105,15 +113,25 @@ def runResampleSubbandSlc(self, misreg=False):
masterFrame = self._insar.loadProduct( self._insar.masterSlcCropProduct)
slaveFrame = self._insar.loadProduct( self._insar.slaveSlcCropProduct)
if self.doRubbersheeting:
print('Using rubber sheeted offsets for resampling sub-bands')
# Modified by V. Brancato 10.14.2019
if self.doRubbersheetingAzimuth:
print('Using rubber sheeted azimuth offsets for resampling sub-bands')
azoffname = os.path.join( self.insar.offsetsDirname, self.insar.azimuthRubbersheetFilename)
else:
print('Using refined offsets for resampling sub-bands')
azoffname = os.path.join( self.insar.offsetsDirname, self.insar.azimuthOffsetFilename)
rgoffname = os.path.join( self.insar.offsetsDirname, self.insar.rangeOffsetFilename)
if self.doRubbersheetingRange:
print('Using rubber sheeted range offsets for resampling sub-bands')
rgoffname = os.path.join( self.insar.offsetsDirname, self.insar.rangeRubbersheetFilename)
else:
print('Using refined offsets for resampling sub-bands')
rgoffname = os.path.join( self.insar.offsetsDirname, self.insar.rangeOffsetFilename)
# ****************** End of Modification
# rgoffname = os.path.join( self.insar.offsetsDirname, self.insar.rangeOffsetFilename)
azpoly = self.insar.loadProduct( os.path.join(self.insar.misregDirname, self.insar.misregFilename) + '_az.xml')
rgpoly = self.insar.loadProduct( os.path.join(self.insar.misregDirname, self.insar.misregFilename) + '_rg.xml')
@@ -124,7 +142,7 @@ def runResampleSubbandSlc(self, misreg=False):
wvlL = self.insar.lowBandRadarWavelength
coregDir = os.path.join(self.insar.coregDirname, self.insar.lowBandSlcDirname)
lowbandCoregFilename = resampleSlc(masterFrame, slaveFrame, imageSlc2, wvlL, coregDir,
lowbandCoregFilename = resampleSlc(self,masterFrame, slaveFrame, imageSlc2, wvlL, coregDir,
azoffname, rgoffname, azpoly=azpoly, rgpoly=rgpoly,misreg=False)
imageSlc2 = os.path.join(self.insar.splitSpectrumDirname, self.insar.highBandSlcDirname,
@@ -132,7 +150,7 @@ def runResampleSubbandSlc(self, misreg=False):
wvlH = self.insar.highBandRadarWavelength
coregDir = os.path.join(self.insar.coregDirname, self.insar.highBandSlcDirname)
highbandCoregFilename = resampleSlc(masterFrame, slaveFrame, imageSlc2, wvlH, coregDir,
highbandCoregFilename = resampleSlc(self,masterFrame, slaveFrame, imageSlc2, wvlH, coregDir,
azoffname, rgoffname, azpoly=azpoly, rgpoly=rgpoly, misreg=False)
self.insar.lowBandSlc2 = lowbandCoregFilename

View File

@@ -0,0 +1,255 @@
#
# Author: Heresh Fattahi
# Copyright 2017
#
import isce
import isceobj
from osgeo import gdal
from scipy import ndimage
from astropy.convolution import convolve
import numpy as np
import os
def mask_filterNoSNR(denseOffsetFile,filterSize,outName):
# Masking the offsets with a data-based approach
# Open the offsets
ds = gdal.Open(denseOffsetFile+'.vrt',gdal.GA_ReadOnly)
off_az = ds.GetRasterBand(1).ReadAsArray()
off_rg = ds.GetRasterBand(2).ReadAsArray()
ds = None
off_az_copy = np.copy(off_az)
off_rg_copy = np.copy(off_rg)
# Compute MAD
off_azm = ndimage.median_filter(off_az,filterSize)
off_rgm = ndimage.median_filter(off_rg,filterSize)
metric_rg = np.abs(off_rgm-off_rg)
metric_az = np.abs(off_azm-off_az)
idx = np.where((metric_rg > 3) | (metric_az > 3))
# Remove missing values in the offset map
off_az_copy[np.where(off_az_copy<-9999)]=np.nan
off_az_copy[idx]=np.nan
# Fill the offsets with smoothed values
off_az_filled = fill_with_smoothed(off_az_copy,filterSize)
# Median filter the offsets
off_az_filled = ndimage.median_filter(off_az_filled,filterSize)
# Save the filtered offsets
length, width = off_az_filled.shape
# writing the masked and filtered offsets to a file
print ('writing masked and filtered offsets to: ', outName)
##Write array to offsetfile
off_az_filled.tofile(outName)
# write the xml file
img = isceobj.createImage()
img.setFilename(outName)
img.setWidth(width)
img.setAccessMode('READ')
img.bands = 1
img.dataType = 'FLOAT'
img.scheme = 'BIP'
img.renderHdr()
return None
def fill(data, invalid=None):
"""
Replace the value of invalid 'data' cells (indicated by 'invalid')
by the value of the nearest valid data cell
Input:
data: numpy array of any dimension
invalid: a binary array of same shape as 'data'.
data value are replaced where invalid is True
If None (default), use: invalid = np.isnan(data)
Output:
Return a filled array.
"""
if invalid is None: invalid = np.isnan(data)
ind = ndimage.distance_transform_edt(invalid,
return_distances=False,
return_indices=True)
return data[tuple(ind)]
def mask_filter(denseOffsetFile, snrFile, band, snrThreshold, filterSize, outName):
#masking and Filtering
##Read in the offset file
ds = gdal.Open(denseOffsetFile + '.vrt', gdal.GA_ReadOnly)
Offset = ds.GetRasterBand(band).ReadAsArray()
ds = None
##Read in the SNR file
ds = gdal.Open(snrFile + '.vrt', gdal.GA_ReadOnly)
snr = ds.GetRasterBand(1).ReadAsArray()
ds = None
# Masking the dense offsets based on SNR
print ('masking the dense offsets with SNR threshold: ', snrThreshold)
Offset[snr<snrThreshold]=np.nan
# Fill the masked region using valid neighboring pixels
Offset = fill(Offset)
############
# Median filtering the masked offsets
print ('Filtering with median filter with size : ', filterSize)
Offset = ndimage.median_filter(Offset, size=filterSize)
length, width = Offset.shape
# writing the masked and filtered offsets to a file
print ('writing masked and filtered offsets to: ', outName)
##Write array to offsetfile
Offset.tofile(outName)
# write the xml file
img = isceobj.createImage()
img.setFilename(outName)
img.setWidth(width)
img.setAccessMode('READ')
img.bands = 1
img.dataType = 'FLOAT'
img.scheme = 'BIP'
img.renderHdr()
return None
def fill_with_smoothed(off,filterSize):
off_2filt=np.copy(off)
kernel = np.ones((filterSize,filterSize),np.float32)/(filterSize*filterSize)
loop = 0
cnt2=1
while (loop<100):
loop += 1
idx2= np.isnan(off_2filt)
cnt2 = np.sum(np.count_nonzero(np.isnan(off_2filt)))
if cnt2 != 0:
off_filt= convolve(off_2filt,kernel,boundary='extend',nan_treatment='interpolate')
off_2filt[idx2]=off_filt[idx2]
idx3 = np.where(off_filt == 0)
off_2filt[idx3]=np.nan
off_filt=None
return off_2filt
def resampleOffset(maskedFiltOffset, geometryOffset, outName):
'''
Oversample offset and add.
'''
from imageMath import IML
import logging
resampledOffset = maskedFiltOffset + ".resampled"
inimg = isceobj.createImage()
inimg.load(geometryOffset + '.xml')
length = inimg.getLength()
width = inimg.getWidth()
###Currently making the assumption that top left of dense offsets and interferograms are the same.
###This is not true for now. We need to update DenseOffsets to have the ability to have same top left
###As the input images. Once that is implemented, the math here should all be consistent.
###However, this is not too far off since the skip for doing dense offsets is generally large.
###The offset is not too large to worry about right now. If the skip is decreased, this could be an issue.
print('oversampling the filtered and masked offsets to the width and length:', width, ' ', length )
cmd = 'gdal_translate -of ENVI -ot Float64 -outsize ' + str(width) + ' ' + str(length) + ' ' + maskedFiltOffset + '.vrt ' + resampledOffset
print(cmd)
os.system(cmd)
img = isceobj.createImage()
img.setFilename(resampledOffset)
img.setWidth(width)
img.setLength(length)
img.setAccessMode('READ')
img.bands = 1
img.dataType = 'DOUBLE'
img.scheme = 'BIP'
img.renderHdr()
###Adding the geometry offset and oversampled offset
geomoff = IML.mmapFromISCE(geometryOffset, logging)
osoff = IML.mmapFromISCE(resampledOffset, logging)
fid = open(outName, 'w')
for ll in range(length):
val = geomoff.bands[0][ll,:] + osoff.bands[0][ll,:]
val.tofile(fid)
fid.close()
img = isceobj.createImage()
img.setFilename(outName)
img.setWidth(width)
img.setLength(length)
img.setAccessMode('READ')
img.bands = 1
img.dataType = 'DOUBLE'
img.scheme = 'BIP'
img.renderHdr()
return None
def runRubbersheetAzimuth(self):
if not self.doRubbersheetingAzimuth:
print('Rubber sheeting in azimuth not requested ... skipping')
return
# denseOffset file name computed from cross-correlation
denseOffsetFile = os.path.join(self.insar.denseOffsetsDirname , self.insar.denseOffsetFilename)
snrFile = denseOffsetFile + "_snr.bil"
denseOffsetFile = denseOffsetFile + ".bil"
# we want the azimuth offsets only which are the first band
band = [1]
snrThreshold = self.rubberSheetSNRThreshold
filterSize = self.rubberSheetFilterSize
filtAzOffsetFile = os.path.join(self.insar.denseOffsetsDirname, self._insar.filtAzimuthOffsetFilename)
# masking and median filtering the dense offsets
if not self.doRubbersheetingRange:
print('Rubber sheeting in range is off, filtering the offsets with a SNR-based mask')
mask_filter(denseOffsetFile, snrFile, band[0], snrThreshold, filterSize, filtAzOffsetFile)
else:
print('Rubber sheeting in range is on, filtering the offsets with data-based mask')
mask_filterNoSNR(denseOffsetFile, filterSize, filtAzOffsetFile)
# azimuth offsets computed from geometry
offsetsDir = self.insar.offsetsDirname
geometryAzimuthOffset = os.path.join(offsetsDir, self.insar.azimuthOffsetFilename)
sheetOffset = os.path.join(offsetsDir, self.insar.azimuthRubbersheetFilename)
# oversampling the filtAzOffsetFile to the same size as geometryAzimuthOffset
# and then update the geometryAzimuthOffset by adding the oversampled
# filtAzOffsetFile to it.
resampleOffset(filtAzOffsetFile, geometryAzimuthOffset, sheetOffset)
return None
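
A stand-alone sketch (synthetic array, not an ISCE raster) of the data-driven masking that mask_filterNoSNR performs when the SNR file is not used: offsets that deviate from a local median by more than 3 pixels, or equal the nodata value, are set to NaN before being refilled and median filtered.

    import numpy as np
    from scipy import ndimage

    filter_size = 3
    off = np.random.rand(20, 20).astype(np.float32)
    off[5, 5] = 50.0                              # plant an outlier

    off_med = ndimage.median_filter(off, filter_size)
    outliers = np.abs(off_med - off) > 3          # same 3-pixel threshold as above
    off_masked = np.copy(off)
    off_masked[off_masked < -9999] = np.nan       # nodata values, as above
    off_masked[outliers] = np.nan

    # simplified stand-in for fill_with_smoothed(): replace the NaNs with the median
    # of the valid pixels, then apply the final median filter as the PR does
    off_filled = np.where(np.isnan(off_masked), np.nanmedian(off_masked), off_masked)
    off_clean = ndimage.median_filter(off_filled, filter_size)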

View File

@@ -0,0 +1,257 @@
#
# Author: Heresh Fattahi
# Copyright 2017
#
# Modified by V. Brancato (10.12.2019)
# Including offset filtering with no SNR masking
#
import isce
import isceobj
from osgeo import gdal
from scipy import ndimage
from astropy.convolution import convolve
import numpy as np
import os
def mask_filterNoSNR(denseOffsetFile,filterSize,outName):
# Masking the offsets with a data-based approach
# Open the offsets
ds = gdal.Open(denseOffsetFile+'.vrt',gdal.GA_ReadOnly)
off_az = ds.GetRasterBand(1).ReadAsArray()
off_rg = ds.GetRasterBand(2).ReadAsArray()
ds = None
off_az_copy = np.copy(off_az)
off_rg_copy = np.copy(off_rg)
# Compute MAD
off_azm = ndimage.median_filter(off_az,filterSize)
off_rgm = ndimage.median_filter(off_rg,filterSize)
metric_rg = np.abs(off_rgm-off_rg)
metric_az = np.abs(off_azm-off_az)
idx = np.where((metric_rg > 3) | (metric_az > 3))
# Remove missing data in the offset map
off_rg_copy[np.where(off_rg_copy<-9999)]=np.nan
off_rg_copy[idx]=np.nan
# Fill the offsets with smoothed values
off_rg_filled = fill_with_smoothed(off_rg_copy,filterSize)
# Median filter the offsets
off_rg_filled = ndimage.median_filter(off_rg_filled,filterSize)
# Save the filtered offsets
length, width = off_rg_filled.shape
# writing the masked and filtered offsets to a file
print ('writing masked and filtered offsets to: ', outName)
##Write array to offsetfile
off_rg_filled.tofile(outName)
# write the xml file
img = isceobj.createImage()
img.setFilename(outName)
img.setWidth(width)
img.setAccessMode('READ')
img.bands = 1
img.dataType = 'FLOAT'
img.scheme = 'BIP'
img.renderHdr()
return None
def fill(data, invalid=None):
"""
Replace the value of invalid 'data' cells (indicated by 'invalid')
by the value of the nearest valid data cell
Input:
data: numpy array of any dimension
invalid: a binary array of same shape as 'data'.
data value are replaced where invalid is True
If None (default), use: invalid = np.isnan(data)
Output:
Return a filled array.
"""
if invalid is None: invalid = np.isnan(data)
ind = ndimage.distance_transform_edt(invalid,
return_distances=False,
return_indices=True)
return data[tuple(ind)]
def fill_with_smoothed(off,filterSize):
off_2filt=np.copy(off)
kernel = np.ones((filterSize,filterSize),np.float32)/(filterSize*filterSize)
loop = 0
cnt2=1
while (loop<100):
loop += 1
idx2= np.isnan(off_2filt)
cnt2 = np.sum(np.count_nonzero(np.isnan(off_2filt)))
if cnt2 != 0:
off_filt= convolve(off_2filt,kernel,boundary='extend',nan_treatment='interpolate')
off_2filt[idx2]=off_filt[idx2]
idx3 = np.where(off_filt == 0)
off_2filt[idx3]=np.nan
off_filt=None
return off_2filt
def mask_filter(denseOffsetFile, snrFile, band, snrThreshold, filterSize, outName):
#masking and Filtering
##Read in the offset file
ds = gdal.Open(denseOffsetFile + '.vrt', gdal.GA_ReadOnly)
Offset = ds.GetRasterBand(band).ReadAsArray()
ds = None
##Read in the SNR file
ds = gdal.Open(snrFile + '.vrt', gdal.GA_ReadOnly)
snr = ds.GetRasterBand(1).ReadAsArray()
ds = None
# Masking the dense offsets based on SNR
print ('masking the dense offsets with SNR threshold: ', snrThreshold)
Offset[snr<snrThreshold]=np.nan
# Fill the masked region using valid neighboring pixels
Offset = fill(Offset)
############
# Median filtering the masked offsets
print ('Filtering with median filter with size : ', filterSize)
Offset = ndimage.median_filter(Offset, size=filterSize)
length, width = Offset.shape
# writing the masked and filtered offsets to a file
print ('writing masked and filtered offsets to: ', outName)
##Write array to offsetfile
Offset.tofile(outName)
# write the xml file
img = isceobj.createImage()
img.setFilename(outName)
img.setWidth(width)
img.setAccessMode('READ')
img.bands = 1
img.dataType = 'FLOAT'
img.scheme = 'BIP'
img.renderHdr()
return None
def resampleOffset(maskedFiltOffset, geometryOffset, outName):
'''
Oversample offset and add.
'''
from imageMath import IML
import logging
resampledOffset = maskedFiltOffset + ".resampled"
inimg = isceobj.createImage()
inimg.load(geometryOffset + '.xml')
length = inimg.getLength()
width = inimg.getWidth()
###Currently making the assumption that top left of dense offsets and interferograms are the same.
###This is not true for now. We need to update DenseOffsets to have the ability to have same top left
###As the input images. Once that is implemented, the math here should all be consistent.
###However, this is not too far off since the skip for doing dense offsets is generally large.
###The offset is not too large to worry about right now. If the skip is decreased, this could be an issue.
print('oversampling the filtered and masked offsets to the width and length:', width, ' ', length )
cmd = 'gdal_translate -of ENVI -ot Float64 -outsize ' + str(width) + ' ' + str(length) + ' ' + maskedFiltOffset + '.vrt ' + resampledOffset
print(cmd)
os.system(cmd)
img = isceobj.createImage()
img.setFilename(resampledOffset)
img.setWidth(width)
img.setLength(length)
img.setAccessMode('READ')
img.bands = 1
img.dataType = 'DOUBLE'
img.scheme = 'BIP'
img.renderHdr()
###Adding the geometry offset and oversampled offset
geomoff = IML.mmapFromISCE(geometryOffset, logging)
osoff = IML.mmapFromISCE(resampledOffset, logging)
fid = open(outName, 'w')
for ll in range(length):
val = geomoff.bands[0][ll,:] + osoff.bands[0][ll,:]
val.tofile(fid)
fid.close()
img = isceobj.createImage()
img.setFilename(outName)
img.setWidth(width)
img.setLength(length)
img.setAccessMode('READ')
img.bands = 1
img.dataType = 'DOUBLE'
img.scheme = 'BIP'
img.renderHdr()
return None
def runRubbersheetRange(self):
if not self.doRubbersheetingRange:
print('Rubber sheeting in range not requested ... skipping')
return
# denseOffset file name computed from cross-correlation
denseOffsetFile = os.path.join(self.insar.denseOffsetsDirname , self.insar.denseOffsetFilename)
snrFile = denseOffsetFile + "_snr.bil"
denseOffsetFile = denseOffsetFile + ".bil"
# we want the range offsets only which are the second band
band = [2]
snrThreshold = self.rubberSheetSNRThreshold
filterSize = self.rubberSheetFilterSize
filtRgOffsetFile = os.path.join(self.insar.denseOffsetsDirname, self._insar.filtRangeOffsetFilename)
# masking and median filtering the dense offsets
if not self.doRubbersheetingRange:
print('Rubber sheeting in range is off, applying SNR-masking for the offsets maps')
mask_filter(denseOffsetFile, snrFile, band[0], snrThreshold, filterSize, filtRgOffsetFile)
else:
print('Rubber sheeting in range is on, applying a data-based offsets-masking')
mask_filterNoSNR(denseOffsetFile,filterSize,filtRgOffsetFile)
# range offsets computed from geometry
offsetsDir = self.insar.offsetsDirname
geometryRangeOffset = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
RgsheetOffset = os.path.join(offsetsDir, self.insar.rangeRubbersheetFilename)
# oversampling the filtRgOffsetFile to the same size as geometryRangeOffset
# and then update the geometryRangeOffset by adding the oversampled
# filtRgOffsetFile to it.
resampleOffset(filtRgOffsetFile, geometryRangeOffset, RgsheetOffset)
return None
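
Finally, an illustrative numpy/scipy sketch of what resampleOffset() does in both new files: oversample the masked, filtered dense offsets to the geometry grid and add them to the geometric offsets, so that range_sheet.off / azimuth_sheet.off contain geometry plus cross-correlation terms. scipy.ndimage.zoom stands in for the gdal_translate -outsize call used in the PR, and all shapes are made up.

    import numpy as np
    from scipy import ndimage

    geom_off = np.zeros((120, 200), dtype=np.float64)       # geometric offsets (full grid)
    dense_off = np.random.rand(12, 20).astype(np.float64)   # masked/filtered dense offsets

    zoom = (geom_off.shape[0] / dense_off.shape[0],
            geom_off.shape[1] / dense_off.shape[1])
    dense_up = ndimage.zoom(dense_off, zoom, order=1)        # oversample to the full grid

    rubber_sheet = geom_off + dense_up                       # equivalent of range_sheet.off
    assert rubber_sheet.shape == geom_off.shape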