Added runRubbersheetRange.py and runRubbersheetAzimuth.py, changed offset filling, moved flat-Earth removal to interferogram formation when rubber sheeting in range is on
parent 794a3488c4
commit 31da6a36f4
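Note on the flat-Earth change: when rubber sheeting in range is enabled, fine resampling switches to the rubber-sheeted range offsets and no longer flattens, so flat-Earth removal moves to interferogram formation. A minimal sketch of that decision, mirroring the branch added to runResampleSlc further down (the helper name and standalone form are illustrative, not part of the commit):

    import os

    def pick_range_offsets(insar, doRubbersheetingRange, offsetsDir):
        # Illustrative only: mirrors the kind='fine' branch in runResampleSlc.
        if doRubbersheetingRange:
            # cross-correlation offsets merged with geometry; flattening is deferred
            rgname = os.path.join(offsetsDir, insar.rangeRubbersheetFilename)
            flatten = False
        else:
            # geometry-only range offsets; the resampler removes the flat Earth as before
            rgname = os.path.join(offsetsDir, insar.rangeOffsetFilename)
            flatten = True
        return rgname, flatten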
@@ -242,14 +242,20 @@ FILTER_STRENGTH = Application.Parameter('filterStrength',
         mandatory=False,
         doc='')
 
-DO_RUBBERSHEETING = Application.Parameter('doRubbersheeting',
-        public_name='do rubbersheeting',
+############################################## Modified by V.Brancato 10.07.2019
+DO_RUBBERSHEETINGAZIMUTH = Application.Parameter('doRubbersheetingAzimuth',
+        public_name='do rubbersheetingAzimuth',
         default=False,
         type=bool,
         mandatory=False,
         doc='')
+DO_RUBBERSHEETINGRANGE = Application.Parameter('doRubbersheetingRange',
+        public_name='do rubbersheetingRange',
+        default=False,
+        type=bool,
+        mandatory=False,
+        doc='')
+#################################################################################
 RUBBERSHEET_SNR_THRESHOLD = Application.Parameter('rubberSheetSNRThreshold',
         public_name='rubber sheet SNR Threshold',
         default = 5.0,
@@ -533,7 +539,8 @@ class _RoiBase(Application, FrameMixin):
         GEOCODE_BOX,
         REGION_OF_INTEREST,
         HEIGHT_RANGE,
-        DO_RUBBERSHEETING,
+        DO_RUBBERSHEETINGRANGE, #Modified by V. Brancato 10.07.2019
+        DO_RUBBERSHEETINGAZIMUTH, #Modified by V. Brancato 10.07.2019
         RUBBERSHEET_SNR_THRESHOLD,
         RUBBERSHEET_FILTER_SIZE,
         DO_DENSEOFFSETS,
@@ -724,7 +731,8 @@ class _RoiBase(Application, FrameMixin):
         self.runResampleSlc = StripmapProc.createResampleSlc(self)
         self.runRefineSlaveTiming = StripmapProc.createRefineSlaveTiming(self)
         self.runDenseOffsets = StripmapProc.createDenseOffsets(self)
-        self.runRubbersheet = StripmapProc.createRubbersheet(self)
+        self.runRubbersheetRange = StripmapProc.createRubbersheetRange(self) #Modified by V. Brancato 10.07.2019
+        self.runRubbersheetAzimuth = StripmapProc.createRubbersheetAzimuth(self) #Modified by V. Brancato 10.07.2019
         self.runResampleSubbandSlc = StripmapProc.createResampleSubbandSlc(self)
         self.runInterferogram = StripmapProc.createInterferogram(self)
         self.runFilter = StripmapProc.createFilter(self)
@ -774,8 +782,11 @@ class _RoiBase(Application, FrameMixin):
|
||||||
args=('refined',))
|
args=('refined',))
|
||||||
|
|
||||||
self.step('dense_offsets', func=self.runDenseOffsets)
|
self.step('dense_offsets', func=self.runDenseOffsets)
|
||||||
|
######################################################################## Modified by V. Brancato 10.07.2019
|
||||||
|
self.step('rubber_sheet_range', func=self.runRubbersheetRange)
|
||||||
|
|
||||||
self.step('rubber_sheet', func=self.runRubbersheet)
|
self.step('rubber_sheet_azimuth',func=self.runRubbersheetAzimuth)
|
||||||
|
#########################################################################
|
||||||
|
|
||||||
self.step('fine_resample', func=self.runResampleSlc,
|
self.step('fine_resample', func=self.runResampleSlc,
|
||||||
args=('fine',))
|
args=('fine',))
|
||||||
|
@@ -853,9 +864,13 @@ class _RoiBase(Application, FrameMixin):
         # run dense offsets
         self.runDenseOffsets()
 
+        ############ Modified by V. Brancato 10.07.2019
         # adding the azimuth offsets computed from cross correlation to geometry offsets
-        self.runRubbersheet()
+        self.runRubbersheetAzimuth()
 
+        # adding the range offsets computed from cross correlation to geometry offsets
+        self.runRubbersheetRange()
+        ####################################################################################
         # resampling using rubbersheeted offsets
         # which include geometry + constant range + constant azimuth
         # + dense azimuth offsets
@@ -325,7 +325,7 @@ AZIMUTH_OFFSET_FILENAME = Component.Parameter('azimuthOffsetFilename',
         doc='')
 
 
-
+# Modified by V. Brancato 10.07.2019
 AZIMUTH_RUBBERSHEET_FILENAME = Component.Parameter('azimuthRubbersheetFilename',
         public_name='azimuth Rubbersheet Image Name',
         default = 'azimuth_sheet.off',
@@ -333,6 +333,13 @@ AZIMUTH_RUBBERSHEET_FILENAME = Component.Parameter('azimuthRubbersheetFilename',
         mandatory=False,
         doc='')
 
+RANGE_RUBBERSHEET_FILENAME = Component.Parameter('rangeRubbersheetFilename',
+        public_name='range Rubbersheet Image Name',
+        default = 'range_sheet.off',
+        type=str,
+        mandatory=False,
+        doc='')
+# End of modification
 MISREG_FILENAME = Component.Parameter('misregFilename',
         public_name='misreg file name',
         default='misreg',
@@ -346,7 +353,7 @@ DENSE_OFFSET_FILENAME = Component.Parameter('denseOffsetFilename',
         type=str,
         mandatory=False,
         doc='file name of dense offsets computed from cross correlating two SLC images')
-
+# Modified by V. Brancato 10.07.2019
 FILT_AZIMUTH_OFFSET_FILENAME = Component.Parameter('filtAzimuthOffsetFilename',
         public_name='filtered azimuth offset filename',
         default='filtAzimuth.off',
@@ -354,6 +361,13 @@ FILT_AZIMUTH_OFFSET_FILENAME = Component.Parameter('filtAzimuthOffsetFilename',
         mandatory=False,
         doc='Filtered azimuth dense offsets')
 
+FILT_RANGE_OFFSET_FILENAME = Component.Parameter('filtRangeOffsetFilename',
+        public_name='filtered range offset filename',
+        default='filtRange.off',
+        type=str,
+        mandatory=False,
+        doc='Filtered range dense offsets')
+# End of modification
 DISPERSIVE_FILENAME = Component.Parameter('dispersiveFilename',
         public_name = 'dispersive phase filename',
         default='dispersive.bil',
@@ -470,8 +484,10 @@ class StripmapProc(Component, FrameMixin):
         LOS_FILENAME,
         RANGE_OFFSET_FILENAME,
         AZIMUTH_OFFSET_FILENAME,
-        AZIMUTH_RUBBERSHEET_FILENAME,
-        FILT_AZIMUTH_OFFSET_FILENAME,
+        AZIMUTH_RUBBERSHEET_FILENAME, # Added by V. Brancato 10.07.2019
+        RANGE_RUBBERSHEET_FILENAME, # Added by V. Brancato 10.07.2019
+        FILT_AZIMUTH_OFFSET_FILENAME, # Added by V. Brancato 10.07.2019
+        FILT_RANGE_OFFSET_FILENAME, # Added by V. Brancato 10.07.2019
         DENSE_OFFSET_FILENAME,
         MISREG_FILENAME,
         DISPERSIVE_FILENAME,
@@ -23,7 +23,7 @@ def runResampleSlc(self, kind='coarse'):
         raise Exception('Unknown operation type {0} in runResampleSlc'.format(kind))
 
     if kind == 'fine':
-        if not self.doRubbersheeting:
+        if not (self.doRubbersheetingRange | self.doRubbersheetingAzimuth): # Modified by V. Brancato 10.10.2019
             print('Rubber sheeting not requested, skipping resampling ....')
             return
 
@@ -68,11 +68,24 @@ def runResampleSlc(self, kind='coarse'):
     #Since the app is based on geometry module we expect pixel-by-pixel offset
     #field
     offsetsDir = self.insar.offsetsDirname
-    rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
+    # Modified by V. Brancato 10.10.2019
+    #rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
+
     if kind in ['coarse', 'refined']:
         azname = os.path.join(offsetsDir, self.insar.azimuthOffsetFilename)
+        rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
     else:
         azname = os.path.join(offsetsDir, self.insar.azimuthRubbersheetFilename)
+        if self.doRubbersheetingRange:
+            print('Rubbersheeting in range is turned on, taking the cross-correlation offsets')
+            print('Setting Flattening to False')
+            rgname = os.path.join(offsetsDir, self.insar.rangeRubbersheetFilename)
+            flatten = False
+        else:
+            print('Rubbersheeting in range is turned off, taking range geometric offsets')
+            rgname = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
+            flatten = True
 
     rngImg = isceobj.createImage()
     rngImg.load(rgname + '.xml')
@@ -85,8 +98,8 @@ def runResampleSlc(self, kind='coarse'):
     width = rngImg.getWidth()
     length = rngImg.getLength()
 
-
-    flatten = True
+    # Modified by V. Brancato 10.10.2019
+    #flatten = True
     rObj.flatten = flatten
     rObj.outputWidth = width
     rObj.outputLines = length
@@ -0,0 +1,255 @@
+#
+# Author: Heresh Fattahi
+# Copyright 2017
+#
+
+import isce
+import isceobj
+from osgeo import gdal
+from scipy import ndimage
+from astropy.convolution import convolve
+import numpy as np
+import os
+
+def mask_filterNoSNR(denseOffsetFile, filterSize, outName):
+    # Masking the offsets with a data-based approach
+
+    # Open the offsets
+    ds = gdal.Open(denseOffsetFile+'.vrt', gdal.GA_ReadOnly)
+    off_az = ds.GetRasterBand(1).ReadAsArray()
+    off_rg = ds.GetRasterBand(2).ReadAsArray()
+    ds = None
+
+    off_az_copy = np.copy(off_az)
+    off_rg_copy = np.copy(off_rg)
+
+    # Compute MAD
+    off_azm = ndimage.median_filter(off_az, filterSize)
+    off_rgm = ndimage.median_filter(off_rg, filterSize)
+
+    metric_rg = np.abs(off_rgm-off_rg)
+    metric_az = np.abs(off_azm-off_az)
+
+    idx = np.where((metric_rg > 3) | (metric_az > 3))
+
+    # Remove missing values in the offset map
+    off_az_copy[np.where(off_az_copy < -9999)] = np.nan
+    off_az_copy[idx] = np.nan
+
+    # Fill the offsets with smoothed values
+    off_az_filled = fill_with_smoothed(off_az_copy, filterSize)
+
+    # Median filter the offsets
+    off_az_filled = ndimage.median_filter(off_az_filled, filterSize)
+
+    # Save the filtered offsets
+    length, width = off_az_filled.shape
+
+    # writing the masked and filtered offsets to a file
+    print('writing masked and filtered offsets to: ', outName)
+
+    ##Write array to offsetfile
+    off_az_filled.tofile(outName)
+
+    # write the xml file
+    img = isceobj.createImage()
+    img.setFilename(outName)
+    img.setWidth(width)
+    img.setAccessMode('READ')
+    img.bands = 1
+    img.dataType = 'FLOAT'
+    img.scheme = 'BIP'
+    img.renderHdr()
+
+    return None
+
+def fill(data, invalid=None):
+    """
+    Replace the value of invalid 'data' cells (indicated by 'invalid')
+    by the value of the nearest valid data cell
+
+    Input:
+        data: numpy array of any dimension
+        invalid: a binary array of same shape as 'data'.
+                 data value are replaced where invalid is True
+                 If None (default), use: invalid = np.isnan(data)
+
+    Output:
+        Return a filled array.
+    """
+    if invalid is None: invalid = np.isnan(data)
+
+    ind = ndimage.distance_transform_edt(invalid,
+                                         return_distances=False,
+                                         return_indices=True)
+    return data[tuple(ind)]
+
+def mask_filter(denseOffsetFile, snrFile, band, snrThreshold, filterSize, outName):
+    # masking and Filtering
+
+    ##Read in the offset file
+    ds = gdal.Open(denseOffsetFile + '.vrt', gdal.GA_ReadOnly)
+    Offset = ds.GetRasterBand(band).ReadAsArray()
+    ds = None
+
+    ##Read in the SNR file
+    ds = gdal.Open(snrFile + '.vrt', gdal.GA_ReadOnly)
+    snr = ds.GetRasterBand(1).ReadAsArray()
+    ds = None
+
+    # Masking the dense offsets based on SNR
+    print('masking the dense offsets with SNR threshold: ', snrThreshold)
+    Offset[snr < snrThreshold] = np.nan
+
+    # Fill the masked region using valid neighboring pixels
+    Offset = fill(Offset)
+    ############
+
+    # Median filtering the masked offsets
+    print('Filtering with median filter with size : ', filterSize)
+    Offset = ndimage.median_filter(Offset, size=filterSize)
+    length, width = Offset.shape
+
+    # writing the masked and filtered offsets to a file
+    print('writing masked and filtered offsets to: ', outName)
+
+    ##Write array to offsetfile
+    Offset.tofile(outName)
+
+    # write the xml file
+    img = isceobj.createImage()
+    img.setFilename(outName)
+    img.setWidth(width)
+    img.setAccessMode('READ')
+    img.bands = 1
+    img.dataType = 'FLOAT'
+    img.scheme = 'BIP'
+    img.renderHdr()
+
+    return None
+
+def fill_with_smoothed(off, filterSize):
+
+    off_2filt = np.copy(off)
+    kernel = np.ones((filterSize, filterSize), np.float32)/(filterSize*filterSize)
+    loop = 0
+    cnt2 = 1
+
+    while (loop < 100):
+        loop += 1
+        idx2 = np.isnan(off_2filt)
+        cnt2 = np.sum(np.count_nonzero(np.isnan(off_2filt)))
+
+        if cnt2 != 0:
+            off_filt = convolve(off_2filt, kernel, boundary='extend', nan_treatment='interpolate')
+            off_2filt[idx2] = off_filt[idx2]
+            idx3 = np.where(off_filt == 0)
+            off_2filt[idx3] = np.nan
+            off_filt = None
+
+    return off_2filt
+
+def resampleOffset(maskedFiltOffset, geometryOffset, outName):
+    '''
+    Oversample offset and add.
+    '''
+    from imageMath import IML
+    import logging
+
+    resampledOffset = maskedFiltOffset + ".resampled"
+
+    inimg = isceobj.createImage()
+    inimg.load(geometryOffset + '.xml')
+    length = inimg.getLength()
+    width = inimg.getWidth()
+
+    ###Currently making the assumption that top left of dense offsets and interferograms are the same.
+    ###This is not true for now. We need to update DenseOffsets to have the ability to have same top left
+    ###as the input images. Once that is implemented, the math here should all be consistent.
+    ###However, this is not too far off since the skip for doing dense offsets is generally large.
+    ###The offset is not too large to worry about right now. If the skip is decreased, this could be an issue.
+
+    print('oversampling the filtered and masked offsets to the width and length:', width, ' ', length)
+    cmd = 'gdal_translate -of ENVI -ot Float64 -outsize ' + str(width) + ' ' + str(length) + ' ' + maskedFiltOffset + '.vrt ' + resampledOffset
+    print(cmd)
+    os.system(cmd)
+
+    img = isceobj.createImage()
+    img.setFilename(resampledOffset)
+    img.setWidth(width)
+    img.setLength(length)
+    img.setAccessMode('READ')
+    img.bands = 1
+    img.dataType = 'DOUBLE'
+    img.scheme = 'BIP'
+    img.renderHdr()
+
+    ###Adding the geometry offset and oversampled offset
+    geomoff = IML.mmapFromISCE(geometryOffset, logging)
+    osoff = IML.mmapFromISCE(resampledOffset, logging)
+
+    fid = open(outName, 'w')
+
+    for ll in range(length):
+        val = geomoff.bands[0][ll,:] + osoff.bands[0][ll,:]
+        val.tofile(fid)
+
+    fid.close()
+
+    img = isceobj.createImage()
+    img.setFilename(outName)
+    img.setWidth(width)
+    img.setLength(length)
+    img.setAccessMode('READ')
+    img.bands = 1
+    img.dataType = 'DOUBLE'
+    img.scheme = 'BIP'
+    img.renderHdr()
+
+    return None
+
+def runRubbersheetAzimuth(self):
+
+    if not self.doRubbersheetingAzimuth:
+        print('Rubber sheeting in azimuth not requested ... skipping')
+        return
+
+    # denseOffset file name computed from cross-correlation
+    denseOffsetFile = os.path.join(self.insar.denseOffsetsDirname, self.insar.denseOffsetFilename)
+    snrFile = denseOffsetFile + "_snr.bil"
+    denseOffsetFile = denseOffsetFile + ".bil"
+
+    # we want the azimuth offsets only, which are the first band
+    band = [1]
+    snrThreshold = self.rubberSheetSNRThreshold
+    filterSize = self.rubberSheetFilterSize
+    filtAzOffsetFile = os.path.join(self.insar.denseOffsetsDirname, self._insar.filtAzimuthOffsetFilename)
+
+    # masking and median filtering the dense offsets
+    if not self.doRubbersheetingRange:
+        print('Rubber sheeting in range is off, filtering the offsets with a SNR-based mask')
+        mask_filter(denseOffsetFile, snrFile, band[0], snrThreshold, filterSize, filtAzOffsetFile)
+    else:
+        print('Rubber sheeting in range is on, filtering the offsets with a data-based mask')
+        mask_filterNoSNR(denseOffsetFile, filterSize, filtAzOffsetFile)
+
+    # azimuth offsets computed from geometry
+    offsetsDir = self.insar.offsetsDirname
+    geometryAzimuthOffset = os.path.join(offsetsDir, self.insar.azimuthOffsetFilename)
+    sheetOffset = os.path.join(offsetsDir, self.insar.azimuthRubbersheetFilename)
+
+    # oversampling the filtAzOffsetFile to the same size of geometryAzimuthOffset
+    # and then update the geometryAzimuthOffset by adding the oversampled
+    # filtAzOffsetFile to it.
+    resampleOffset(filtAzOffsetFile, geometryAzimuthOffset, sheetOffset)
+
+    return None
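The data-based masking above (mask_filterNoSNR, repeated in the range module below) flags any sample whose offset deviates by more than 3 pixels from a median-filtered version of the field, masks it, and later fills it from smoothed neighbors. A quick synthetic check of that rule (array sizes and variable names here are illustrative, not from the commit):

    import numpy as np
    from scipy import ndimage

    filterSize = 5
    off = 0.1 * np.random.randn(200, 200).astype(np.float32)   # well-behaved offsets
    off[80:83, 120:123] += 25.0                                 # a small blob of blunders
    off_med = ndimage.median_filter(off, filterSize)
    outliers = np.abs(off_med - off) > 3                        # same fixed 3-pixel threshold as above
    off[outliers] = np.nan                                      # NaNs are then filled by fill_with_smoothed()
    print(np.count_nonzero(outliers), 'samples flagged')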
@@ -0,0 +1,257 @@
+#
+# Author: Heresh Fattahi
+# Copyright 2017
+#
+# Modified by V. Brancato (10.12.2019)
+# Including offset filtering with no SNR masking
+#
+
+import isce
+import isceobj
+from osgeo import gdal
+from scipy import ndimage
+from astropy.convolution import convolve
+import numpy as np
+import os
+
+def mask_filterNoSNR(denseOffsetFile, filterSize, outName):
+    # Masking the offsets with a data-based approach
+
+    # Open the offsets
+    ds = gdal.Open(denseOffsetFile+'.vrt', gdal.GA_ReadOnly)
+    off_az = ds.GetRasterBand(1).ReadAsArray()
+    off_rg = ds.GetRasterBand(2).ReadAsArray()
+    ds = None
+
+    off_az_copy = np.copy(off_az)
+    off_rg_copy = np.copy(off_rg)
+
+    # Compute MAD
+    off_azm = ndimage.median_filter(off_az, filterSize)
+    off_rgm = ndimage.median_filter(off_rg, filterSize)
+
+    metric_rg = np.abs(off_rgm-off_rg)
+    metric_az = np.abs(off_azm-off_az)
+
+    idx = np.where((metric_rg > 3) | (metric_az > 3))
+
+    # Remove missing data in the offset map
+    off_rg_copy[np.where(off_rg_copy < -9999)] = np.nan
+    off_rg_copy[idx] = np.nan
+
+    # Fill the offsets with smoothed values
+    off_rg_filled = fill_with_smoothed(off_rg_copy, filterSize)
+
+    # Median filter the offsets
+    off_rg_filled = ndimage.median_filter(off_rg_filled, filterSize)
+
+    # Save the filtered offsets
+    length, width = off_rg_filled.shape
+
+    # writing the masked and filtered offsets to a file
+    print('writing masked and filtered offsets to: ', outName)
+
+    ##Write array to offsetfile
+    off_rg_filled.tofile(outName)
+
+    # write the xml file
+    img = isceobj.createImage()
+    img.setFilename(outName)
+    img.setWidth(width)
+    img.setAccessMode('READ')
+    img.bands = 1
+    img.dataType = 'FLOAT'
+    img.scheme = 'BIP'
+    img.renderHdr()
+
+    return None
+
+def fill(data, invalid=None):
+    """
+    Replace the value of invalid 'data' cells (indicated by 'invalid')
+    by the value of the nearest valid data cell
+
+    Input:
+        data: numpy array of any dimension
+        invalid: a binary array of same shape as 'data'.
+                 data value are replaced where invalid is True
+                 If None (default), use: invalid = np.isnan(data)
+
+    Output:
+        Return a filled array.
+    """
+    if invalid is None: invalid = np.isnan(data)
+
+    ind = ndimage.distance_transform_edt(invalid,
+                                         return_distances=False,
+                                         return_indices=True)
+    return data[tuple(ind)]
+
+def fill_with_smoothed(off, filterSize):
+
+    off_2filt = np.copy(off)
+    kernel = np.ones((filterSize, filterSize), np.float32)/(filterSize*filterSize)
+    loop = 0
+    cnt2 = 1
+
+    while (loop < 100):
+        loop += 1
+        idx2 = np.isnan(off_2filt)
+        cnt2 = np.sum(np.count_nonzero(np.isnan(off_2filt)))
+
+        if cnt2 != 0:
+            off_filt = convolve(off_2filt, kernel, boundary='extend', nan_treatment='interpolate')
+            off_2filt[idx2] = off_filt[idx2]
+            idx3 = np.where(off_filt == 0)
+            off_2filt[idx3] = np.nan
+            off_filt = None
+
+    return off_2filt
+
+def mask_filter(denseOffsetFile, snrFile, band, snrThreshold, filterSize, outName):
+    # masking and Filtering
+
+    ##Read in the offset file
+    ds = gdal.Open(denseOffsetFile + '.vrt', gdal.GA_ReadOnly)
+    Offset = ds.GetRasterBand(band).ReadAsArray()
+    ds = None
+
+    ##Read in the SNR file
+    ds = gdal.Open(snrFile + '.vrt', gdal.GA_ReadOnly)
+    snr = ds.GetRasterBand(1).ReadAsArray()
+    ds = None
+
+    # Masking the dense offsets based on SNR
+    print('masking the dense offsets with SNR threshold: ', snrThreshold)
+    Offset[snr < snrThreshold] = np.nan
+
+    # Fill the masked region using valid neighboring pixels
+    Offset = fill(Offset)
+    ############
+
+    # Median filtering the masked offsets
+    print('Filtering with median filter with size : ', filterSize)
+    Offset = ndimage.median_filter(Offset, size=filterSize)
+    length, width = Offset.shape
+
+    # writing the masked and filtered offsets to a file
+    print('writing masked and filtered offsets to: ', outName)
+
+    ##Write array to offsetfile
+    Offset.tofile(outName)
+
+    # write the xml file
+    img = isceobj.createImage()
+    img.setFilename(outName)
+    img.setWidth(width)
+    img.setAccessMode('READ')
+    img.bands = 1
+    img.dataType = 'FLOAT'
+    img.scheme = 'BIP'
+    img.renderHdr()
+
+    return None
+
+def resampleOffset(maskedFiltOffset, geometryOffset, outName):
+    '''
+    Oversample offset and add.
+    '''
+    from imageMath import IML
+    import logging
+
+    resampledOffset = maskedFiltOffset + ".resampled"
+
+    inimg = isceobj.createImage()
+    inimg.load(geometryOffset + '.xml')
+    length = inimg.getLength()
+    width = inimg.getWidth()
+
+    ###Currently making the assumption that top left of dense offsets and interferograms are the same.
+    ###This is not true for now. We need to update DenseOffsets to have the ability to have same top left
+    ###as the input images. Once that is implemented, the math here should all be consistent.
+    ###However, this is not too far off since the skip for doing dense offsets is generally large.
+    ###The offset is not too large to worry about right now. If the skip is decreased, this could be an issue.
+
+    print('oversampling the filtered and masked offsets to the width and length:', width, ' ', length)
+    cmd = 'gdal_translate -of ENVI -ot Float64 -outsize ' + str(width) + ' ' + str(length) + ' ' + maskedFiltOffset + '.vrt ' + resampledOffset
+    print(cmd)
+    os.system(cmd)
+
+    img = isceobj.createImage()
+    img.setFilename(resampledOffset)
+    img.setWidth(width)
+    img.setLength(length)
+    img.setAccessMode('READ')
+    img.bands = 1
+    img.dataType = 'DOUBLE'
+    img.scheme = 'BIP'
+    img.renderHdr()
+
+    ###Adding the geometry offset and oversampled offset
+    geomoff = IML.mmapFromISCE(geometryOffset, logging)
+    osoff = IML.mmapFromISCE(resampledOffset, logging)
+
+    fid = open(outName, 'w')
+
+    for ll in range(length):
+        val = geomoff.bands[0][ll,:] + osoff.bands[0][ll,:]
+        val.tofile(fid)
+
+    fid.close()
+
+    img = isceobj.createImage()
+    img.setFilename(outName)
+    img.setWidth(width)
+    img.setLength(length)
+    img.setAccessMode('READ')
+    img.bands = 1
+    img.dataType = 'DOUBLE'
+    img.scheme = 'BIP'
+    img.renderHdr()
+
+    return None
+
+def runRubbersheetRange(self):
+
+    if not self.doRubbersheetingRange:
+        print('Rubber sheeting in range not requested ... skipping')
+        return
+
+    # denseOffset file name computed from cross-correlation
+    denseOffsetFile = os.path.join(self.insar.denseOffsetsDirname, self.insar.denseOffsetFilename)
+    snrFile = denseOffsetFile + "_snr.bil"
+    denseOffsetFile = denseOffsetFile + ".bil"
+
+    # we want the range offsets only, which are the second band
+    band = [2]
+    snrThreshold = self.rubberSheetSNRThreshold
+    filterSize = self.rubberSheetFilterSize
+    filtRgOffsetFile = os.path.join(self.insar.denseOffsetsDirname, self._insar.filtRangeOffsetFilename)
+
+    # masking and median filtering the dense offsets
+    if not self.doRubbersheetingRange:
+        print('Rubber sheeting in range is off, applying SNR-masking for the offsets maps')
+        mask_filter(denseOffsetFile, snrFile, band[0], snrThreshold, filterSize, filtRgOffsetFile)
+    else:
+        print('Rubber sheeting in range is on, applying a data-based offsets-masking')
+        mask_filterNoSNR(denseOffsetFile, filterSize, filtRgOffsetFile)
+
+    # range offsets computed from geometry
+    offsetsDir = self.insar.offsetsDirname
+    geometryRangeOffset = os.path.join(offsetsDir, self.insar.rangeOffsetFilename)
+    RgsheetOffset = os.path.join(offsetsDir, self.insar.rangeRubbersheetFilename)
+
+    # oversampling the filtRgOffsetFile to the same size of geometryRangeOffset
+    # and then update the geometryRangeOffset by adding the oversampled
+    # filtRgOffsetFile to it.
+    resampleOffset(filtRgOffsetFile, geometryRangeOffset, RgsheetOffset)
+
+    return None