Update runRubbersheetAzimuth.py

Add "from scipy import ndimage"
LT1AB
HBaldwin3 2019-12-19 19:13:43 -06:00
parent 7df2b44a0c
commit 851858b228
1 changed file with 27 additions and 27 deletions
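For context, the helpers touched in this file lean on scipy.ndimage, which is what the new import provides. A minimal, self-contained sketch of the two ndimage calls the module depends on (the toy array below is illustrative, not part of the commit):

import numpy as np
from scipy import ndimage

# Toy offset map; a real one would come from the dense-offsets GDAL raster
off = np.random.rand(32, 32).astype(np.float32)
off[5, 7] = 0.0  # pretend this pixel was masked out

# median_filter suppresses isolated noisy spots in the offset map
smoothed = ndimage.median_filter(off, size=3)

# distance_transform_edt with return_indices gives, for every invalid pixel,
# the indices of its nearest valid pixel (the basis of fill() further down)
invalid = (off == 0.0)
ind = ndimage.distance_transform_edt(invalid, return_distances=False,
                                     return_indices=True)
filled = off[tuple(ind)]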

@@ -22,39 +22,39 @@ def mask_filterNoSNR(denseOffsetFile,filterSize,outName):
    off_az = ds.GetRasterBand(1).ReadAsArray()
    off_rg = ds.GetRasterBand(2).ReadAsArray()
    ds = None
    # Remove missing values from ampcor
    off_rg[np.where(off_rg < -9999)]=0
    off_az[np.where(off_az < -9999)]=0
    # Store the offsets in a complex variable
    off = off_rg + 1j*off_az
    # Mask the azimuth offsets based on the MAD
    mask = off_masking(off,filterSize,thre=3)
    xoff_masked = np.ma.array(off.real,mask=mask)
    yoff_masked = np.ma.array(off.imag,mask=mask)
    # Delete unused variables
    mask = None
    off = None
    # Remove residual noisy spots with a median filter on the azimuth offmap
    yoff_masked.mask = yoff_masked.mask | \
            (ndimage.median_filter(xoff_masked.filled(fill_value=0),3) == 0) | \
            (ndimage.median_filter(yoff_masked.filled(fill_value=0),3) == 0)
    # Fill the data by iteratively using smoothed values
    data = yoff_masked.data
    data[yoff_masked.mask]=np.nan
    off_az_filled = fill_with_smoothed(data,filterSize)
    # Apply median filter to smooth the azimuth offset map
    off_az_filled = ndimage.median_filter(off_az_filled,filterSize)
    # Save the filtered offsets
    length, width = off_az_filled.shape
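The hunk above masks the azimuth offsets and converts the rejected samples to NaN so they can be gap-filled. A compact sketch of that mask-then-NaN pattern on synthetic data (array names, sizes, and the seeded mask are placeholders, not the module's data):

import numpy as np
from scipy import ndimage

# Stand-in for the masked azimuth offsets used above
yoff = np.random.randn(64, 64).astype(np.float32)
mask = np.zeros_like(yoff, dtype=bool)
mask[10:12, 10:12] = True          # pretend these pixels failed the MAD test

yoff_masked = np.ma.array(yoff, mask=mask)

# Pixels whose 3x3 median (computed on zero-filled data) is exactly 0 are
# treated as residual noise and folded into the mask, as in the code above
yoff_masked.mask = yoff_masked.mask | \
    (ndimage.median_filter(yoff_masked.filled(fill_value=0), 3) == 0)

# Turn masked samples into NaN so an iterative fill can recognize the gaps
data = yoff_masked.data.copy()
data[yoff_masked.mask] = np.nan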
@@ -74,8 +74,8 @@ def mask_filterNoSNR(denseOffsetFile,filterSize,outName):
    img.scheme = 'BIP'
    img.renderHdr()
    return
def off_masking(off,filterSize,thre=2):
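The body of off_masking falls outside the hunks shown here. Judging only from the comment "Mask the azimuth offsets based on the MAD", a generic MAD-style outlier mask could look like the sketch below; this is purely illustrative and not the function's actual implementation:

import numpy as np
from scipy import ndimage

def mad_mask(off, filterSize, thre=3):
    # Flag pixels that deviate from their local median by more than `thre`
    # times the local median absolute deviation (illustrative only)
    med = ndimage.median_filter(off.real, filterSize) \
        + 1j * ndimage.median_filter(off.imag, filterSize)
    dev = np.abs(off - med)
    mad = ndimage.median_filter(dev, filterSize)
    return dev > thre * mad

off = (np.random.randn(32, 32) + 1j * np.random.randn(32, 32)).astype(np.complex64)
mask = mad_mask(off, filterSize=5)   # boolean array, True = outlier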
@@ -94,16 +94,18 @@ def fill(data, invalid=None):
    """
    Replace the value of invalid 'data' cells (indicated by 'invalid')
    by the value of the nearest valid data cell
    Input:
        data: numpy array of any dimension
        invalid: a binary array of same shape as 'data'.
                 data values are replaced where invalid is True
                 If None (default), use: invalid = np.isnan(data)
    Output:
        Return a filled array.
    """
    from scipy import ndimage
    if invalid is None: invalid = np.isnan(data)
    ind = ndimage.distance_transform_edt(invalid,
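As documented above, fill() replaces each invalid cell with the value of the nearest valid cell. The distance_transform_edt call is truncated in this hunk; a self-contained version of the idea, assuming the call completes with return_distances=False and return_indices=True, is:

import numpy as np
from scipy import ndimage

def fill_nearest(data, invalid=None):
    # Replace invalid cells with the value of the nearest valid cell
    # (same idea as fill() above; sketch only)
    if invalid is None:
        invalid = np.isnan(data)
    # Indices of the closest valid cell for every cell in the array ...
    ind = ndimage.distance_transform_edt(invalid, return_distances=False,
                                         return_indices=True)
    # ... used to pull that cell's value into the gap
    return data[tuple(ind)]

a = np.array([[1.0, np.nan, 3.0],
              [np.nan, np.nan, 6.0]])
print(fill_nearest(a))   # NaNs replaced by their nearest valid neighbours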
@@ -166,7 +168,7 @@ def fill_with_smoothed(off,filterSize):
    kernel = np.ones((filterSize,filterSize),np.float32)/(filterSize*filterSize)
    loop = 0
    cnt2=1
    while (cnt2!=0 & loop<100):
        loop += 1
        idx2= np.isnan(off_2filt)
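fill_with_smoothed iterates: smooth the array with the normalized box kernel, copy smoothed values into the NaN gaps, and repeat until no gaps remain or 100 passes have run. The smoothing call itself sits outside the shown hunks, so the sketch below assumes a scipy.signal.convolve2d box filter; it also writes the loop condition with an explicit and, since the & above binds tighter than the comparisons around it:

import numpy as np
from scipy.signal import convolve2d

def fill_with_smoothed_sketch(off, filterSize):
    # Iteratively fill NaN gaps with box-smoothed values (sketch only)
    off_2filt = np.copy(off)
    kernel = np.ones((filterSize, filterSize), np.float32) / (filterSize * filterSize)
    loop, cnt2 = 0, 1
    while cnt2 != 0 and loop < 100:
        loop += 1
        idx2 = np.isnan(off_2filt)
        cnt2 = np.count_nonzero(idx2)
        if cnt2 != 0:
            # Smooth a zero-filled copy; gaps bordered by data pick up a
            # non-zero average, gaps deep inside a hole stay exactly 0
            off_filt = convolve2d(np.nan_to_num(off_2filt), kernel,
                                  boundary='symm', mode='same')
            off_2filt[idx2] = off_filt[idx2]
            # Cells that smoothed to exactly 0 are re-flagged for the next pass
            off_2filt[off_filt == 0] = np.nan
    return off_2filt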
@@ -178,9 +180,9 @@ def fill_with_smoothed(off,filterSize):
        idx3 = np.where(off_filt == 0)
        off_2filt[idx3]=np.nan
        off_filt=None
    return off_2filt
def resampleOffset(maskedFiltOffset, geometryOffset, outName):
    '''
    Oversample offset and add.
    '''
@@ -198,7 +200,7 @@ def resampleOffset(maskedFiltOffset, geometryOffset, outName):
    ###Currently making the assumption that top left of dense offsets and interferograms are the same.
    ###This is not true for now. We need to update DenseOffsets to have the ability to have same top left
    ###As the input images. Once that is implemented, the math here should all be consistent.
    ###However, this is not too far off since the skip for doing dense offsets is generally large.
    ###The offset is not too large to worry about right now. If the skip is decreased, this could be an issue.
    print('oversampling the filtered and masked offsets to the width and length:', width, ' ', length )
@@ -226,7 +228,7 @@ def resampleOffset(maskedFiltOffset, geometryOffset, outName):
    for ll in range(length):
        val = geomoff.bands[0][ll,:] + osoff.bands[0][ll,:]
        val.tofile(fid)
    fid.close()
    img = isceobj.createImage()
@@ -239,7 +241,7 @@ def resampleOffset(maskedFiltOffset, geometryOffset, outName):
    img.scheme = 'BIP'
    img.renderHdr()
    return None
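resampleOffset oversamples the masked, filtered offsets to the interferogram grid and adds them to the geometric azimuth offsets line by line. Stripped of the ISCE image objects, the add-and-stream step reduces to the following (the arrays, their sizes, and the output file name are placeholders):

import numpy as np

# Plain-array stand-ins for the geometric offsets (geomoff) and the
# oversampled filtered offsets (osoff), already on the same grid
length, width = 128, 256
geomoff = np.zeros((length, width), dtype=np.float32)
osoff = (0.1 * np.random.randn(length, width)).astype(np.float32)

# Add the two layers line by line and stream the sum to a flat binary file,
# mirroring the write loop above
with open('azimuth_sheet.off', 'wb') as fid:
    for ll in range(length):
        val = geomoff[ll, :] + osoff[ll, :]
        val.tofile(fid)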
@@ -264,7 +266,7 @@ def runRubbersheetAzimuth(self):
    if not self.doRubbersheetingRange:
        print('Rubber sheeting in range is off, filtering the offsets with a SNR-based mask')
        mask_filter(denseOffsetFile, snrFile, band[0], snrThreshold, filterSize, filtAzOffsetFile)
    else:
        print('Rubber sheeting in range is on, filtering the offsets with data-based mask')
        mask_filterNoSNR(denseOffsetFile, filterSize, filtAzOffsetFile)
@@ -274,10 +276,8 @@ def runRubbersheetAzimuth(self):
    sheetOffset = os.path.join(offsetsDir, self.insar.azimuthRubbersheetFilename)
    # oversample the filtAzOffsetFile to the same size as geometryAzimuthOffset
    # and then update geometryAzimuthOffset by adding the oversampled
    # filtAzOffsetFile to it.
    resampleOffset(filtAzOffsetFile, geometryAzimuthOffset, sheetOffset)
    return None
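Putting the step together: runRubbersheetAzimuth picks the masking strategy from self.doRubbersheetingRange and then oversamples and adds the result. A condensed paraphrase of that control flow, with the object attributes rewritten as explicit arguments purely to keep the sketch self-contained (mask_filter, mask_filterNoSNR, and resampleOffset are the module functions shown in this diff):

def run_rubbersheet_azimuth_sketch(do_rubbersheeting_range,
                                   dense_offset_file, snr_file, band,
                                   snr_threshold, filter_size,
                                   filt_az_offset_file,
                                   geometry_azimuth_offset, sheet_offset):
    if not do_rubbersheeting_range:
        # Range rubbersheeting off: mask the azimuth offsets with an SNR threshold
        mask_filter(dense_offset_file, snr_file, band[0], snr_threshold,
                    filter_size, filt_az_offset_file)
    else:
        # Range rubbersheeting on: data-driven (MAD + median filter) mask
        mask_filterNoSNR(dense_offset_file, filter_size, filt_az_offset_file)
    # Oversample the filtered offsets to the geometry grid and add them in
    resampleOffset(filt_az_offset_file, geometry_azimuth_offset, sheet_offset)
    return None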