Merge branch 'master' into cmake

LT1AB
Ryan Burns 2020-01-30 22:28:54 -08:00
commit 7274dbd2a4
18 changed files with 518 additions and 457 deletions

View File

@ -265,7 +265,7 @@ RUBBERSHEET_SNR_THRESHOLD = Application.Parameter('rubberSheetSNRThreshold',
RUBBERSHEET_FILTER_SIZE = Application.Parameter('rubberSheetFilterSize',
public_name='rubber sheet filter size',
default = 8,
default = 9,
type = int,
mandatory = False,
doc = '')

View File

@ -62,7 +62,7 @@ def estimateOffsetField(master, slave, denseOffsetFileName,
else:
objOffset.setImageDataType2('real')
objOffset.offsetImageName = denseOffsetFileName + '.bil'
objOffset.snrImageName = denseOffsetFileName +'_snr.bil'
objOffset.covImageName = denseOffsetFileName +'_cov.bil'
@ -75,11 +75,11 @@ def estimateOffsetField(master, slave, denseOffsetFileName,
def runDenseOffsets(self):
if self.doDenseOffsets or self.doRubbersheeting:
if self.doDenseOffsets or self.doRubbersheetingAzimuth:
if self.doDenseOffsets:
print('Dense offsets explicitly requested')
if self.doRubbersheeting:
if self.doRubbersheetingAzimuth:
print('Generating offsets as rubber sheeting requested')
else:
return
@ -96,7 +96,7 @@ def runDenseOffsets(self):
os.makedirs(dirname)
denseOffsetFilename = os.path.join(dirname , self.insar.denseOffsetFilename)
field = estimateOffsetField(masterSlc, slaveSlc, denseOffsetFilename,
ww = self.denseWindowWidth,
wh = self.denseWindowHeight,
@ -107,5 +107,5 @@ def runDenseOffsets(self):
self._insar.offset_top = field[0]
self._insar.offset_left = field[1]
return None

View File

@ -56,7 +56,7 @@ def compute_FlatEarth(self,ifgFilename,width,length,radarWavelength):
# Open the interferogram
#ifgFilename= os.path.join(self.insar.ifgDirname, self.insar.ifgFilename)
intf = np.memmap(ifgFilename+'.full',dtype=np.complex64,mode='r+',shape=(length,width))
intf = np.memmap(ifgFilename,dtype=np.complex64,mode='r+',shape=(length,width))
for ll in range(length):
intf[ll,:] *= np.exp(cJ*fact*rng2[ll,:])
@ -155,10 +155,13 @@ def generateIgram(self,imageSlc1, imageSlc2, resampName, azLooks, rgLooks,radarW
else:
resampAmp += '.amp'
resampInt = resampName
if not self.doRubbersheetingRange:
resampInt = resampName
else:
resampInt = resampName + ".full"
objInt = isceobj.createIntImage()
objInt.setFilename(resampInt+'.full')
objInt.setFilename(resampInt)
objInt.setWidth(intWidth)
imageInt = isceobj.createIntImage()
IU.copyAttributes(objInt, imageInt)
@ -166,7 +169,7 @@ def generateIgram(self,imageSlc1, imageSlc2, resampName, azLooks, rgLooks,radarW
objInt.createImage()
objAmp = isceobj.createAmpImage()
objAmp.setFilename(resampAmp+'.full')
objAmp.setFilename(resampAmp)
objAmp.setWidth(intWidth)
imageAmp = isceobj.createAmpImage()
IU.copyAttributes(objAmp, imageAmp)
@ -196,8 +199,8 @@ def generateIgram(self,imageSlc1, imageSlc2, resampName, azLooks, rgLooks,radarW
compute_FlatEarth(self,resampInt,intWidth,lines,radarWavelength)
# Perform Multilook
multilook(resampInt+'.full', outname=resampInt, alks=azLooks, rlks=rgLooks) #takeLooks(objAmp,azLooks,rgLooks)
multilook(resampAmp+'.full', outname=resampAmp, alks=azLooks, rlks=rgLooks) #takeLooks(objInt,azLooks,rgLooks)
multilook(resampInt, outname=resampName, alks=azLooks, rlks=rgLooks) #takeLooks(objAmp,azLooks,rgLooks)
multilook(resampAmp, outname=resampAmp.replace(".full",""), alks=azLooks, rlks=rgLooks) #takeLooks(objInt,azLooks,rgLooks)
#os.system('rm ' + resampInt+'.full* ' + resampAmp + '.full* ')
# End of modification

View File

@ -13,13 +13,13 @@ def fill(data, invalid=None):
"""
Replace the value of invalid 'data' cells (indicated by 'invalid')
by the value of the nearest valid data cell
Input:
data: numpy array of any dimension
invalid: a binary array of same shape as 'data'.
data value are replaced where invalid is True
If None (default), use: invalid = np.isnan(data)
Output:
Return a filled array.
"""
@ -97,7 +97,7 @@ def resampleOffset(maskedFiltOffset, geometryOffset, outName):
###Currently making the assumption that top left of dense offsets and interferograms are the same.
###This is not true for now. We need to update DenseOffsets to have the ability to have same top left
###As the input images. Once that is implemented, the math here should all be consistent.
###However, this is not too far off since the skip for doing dense offsets is generally large.
###However, this is not too far off since the skip for doing dense offsets is generally large.
###The offset is not too large to worry about right now. If the skip is decreased, this could be an issue.
print('oversampling the filtered and masked offsets to the width and length:', width, ' ', length )
@ -125,7 +125,7 @@ def resampleOffset(maskedFiltOffset, geometryOffset, outName):
for ll in range(length):
val = geomoff.bands[0][ll,:] + osoff.bands[0][ll,:]
val.tofile(fid)
fid.close()
img = isceobj.createImage()
@ -138,13 +138,13 @@ def resampleOffset(maskedFiltOffset, geometryOffset, outName):
img.scheme = 'BIP'
img.renderHdr()
return None
def runRubbersheet(self):
if not self.doRubbersheeting:
if not self.doRubbersheetingAzimuth:
print('Rubber sheeting not requested ... skipping')
return
@ -168,11 +168,9 @@ def runRubbersheet(self):
sheetOffset = os.path.join(offsetsDir, self.insar.azimuthRubbersheetFilename)
# oversampling the filtAzOffsetFile to the same size of geometryAzimuthOffset
# and then update the geometryAzimuthOffset by adding the oversampled
# and then update the geometryAzimuthOffset by adding the oversampled
# filtAzOffsetFile to it.
resampleOffset(filtAzOffsetFile, geometryAzimuthOffset, sheetOffset)
print("I'm here")
return None

View File

@ -22,39 +22,39 @@ def mask_filterNoSNR(denseOffsetFile,filterSize,outName):
off_az = ds.GetRasterBand(1).ReadAsArray()
off_rg = ds.GetRasterBand(2).ReadAsArray()
ds = None
# Remove missing values from ampcor
# Remove missing values from ampcor
off_rg[np.where(off_rg < -9999)]=0
off_az[np.where(off_az < -9999)]=0
# Store the offsets in a complex variable
off = off_rg + 1j*off_az
# Mask the azimuth offsets based on the MAD
mask = off_masking(off,filterSize,thre=3)
mask = off_masking(off,filterSize,thre=3)
xoff_masked = np.ma.array(off.real,mask=mask)
yoff_masked = np.ma.array(off.imag,mask=mask)
# Delete unused variables
mask = None
off = None
# Remove residual noisy spots with a median filter on the azimuth offmap
yoff_masked.mask = yoff_masked.mask | \
(ndimage.median_filter(xoff_masked.filled(fill_value=0),3) == 0) | \
(ndimage.median_filter(yoff_masked.filled(fill_value=0),3) == 0)
# Fill the data by iteratively using smoothed values
# Fill the data by iteratively using smoothed values
data = yoff_masked.data
data[yoff_masked.mask]=np.nan
off_az_filled = fill_with_smoothed(data,filterSize)
# Apply median filter to smooth the azimuth offset map
off_az_filled = ndimage.median_filter(off_az_filled,filterSize)
# Save the filtered offsets
length, width = off_az_filled.shape
@ -74,8 +74,8 @@ def mask_filterNoSNR(denseOffsetFile,filterSize,outName):
img.scheme = 'BIP'
img.renderHdr()
return
return
def off_masking(off,filterSize,thre=2):
@ -94,16 +94,18 @@ def fill(data, invalid=None):
"""
Replace the value of invalid 'data' cells (indicated by 'invalid')
by the value of the nearest valid data cell
Input:
data: numpy array of any dimension
invalid: a binary array of same shape as 'data'.
data value are replaced where invalid is True
If None (default), use: invalid = np.isnan(data)
Output:
Return a filled array.
"""
from scipy import ndimage
if invalid is None: invalid = np.isnan(data)
ind = ndimage.distance_transform_edt(invalid,
@ -166,7 +168,7 @@ def fill_with_smoothed(off,filterSize):
kernel = np.ones((filterSize,filterSize),np.float32)/(filterSize*filterSize)
loop = 0
cnt2=1
while (cnt2!=0 & loop<100):
loop += 1
idx2= np.isnan(off_2filt)
@ -178,9 +180,9 @@ def fill_with_smoothed(off,filterSize):
idx3 = np.where(off_filt == 0)
off_2filt[idx3]=np.nan
off_filt=None
return off_2filt
def resampleOffset(maskedFiltOffset, geometryOffset, outName):
'''
Oversample offset and add.
@ -198,7 +200,7 @@ def resampleOffset(maskedFiltOffset, geometryOffset, outName):
###Currently making the assumption that top left of dense offsets and interferograms are the same.
###This is not true for now. We need to update DenseOffsets to have the ability to have same top left
###As the input images. Once that is implemented, the math here should all be consistent.
###However, this is not too far off since the skip for doing dense offsets is generally large.
###However, this is not too far off since the skip for doing dense offsets is generally large.
###The offset is not too large to worry about right now. If the skip is decreased, this could be an issue.
print('oversampling the filtered and masked offsets to the width and length:', width, ' ', length )
@ -226,7 +228,7 @@ def resampleOffset(maskedFiltOffset, geometryOffset, outName):
for ll in range(length):
val = geomoff.bands[0][ll,:] + osoff.bands[0][ll,:]
val.tofile(fid)
fid.close()
img = isceobj.createImage()
@ -239,7 +241,7 @@ def resampleOffset(maskedFiltOffset, geometryOffset, outName):
img.scheme = 'BIP'
img.renderHdr()
return None
@ -264,7 +266,7 @@ def runRubbersheetAzimuth(self):
if not self.doRubbersheetingRange:
print('Rubber sheeting in range is off, filtering the offsets with a SNR-based mask')
mask_filter(denseOffsetFile, snrFile, band[0], snrThreshold, filterSize, filtAzOffsetFile)
else:
else:
print('Rubber sheeting in range is on, filtering the offsets with data-based mask')
mask_filterNoSNR(denseOffsetFile, filterSize, filtAzOffsetFile)
@ -274,10 +276,8 @@ def runRubbersheetAzimuth(self):
sheetOffset = os.path.join(offsetsDir, self.insar.azimuthRubbersheetFilename)
# oversampling the filtAzOffsetFile to the same size of geometryAzimuthOffset
# and then update the geometryAzimuthOffset by adding the oversampled
# and then update the geometryAzimuthOffset by adding the oversampled
# filtAzOffsetFile to it.
resampleOffset(filtAzOffsetFile, geometryAzimuthOffset, sheetOffset)
return None

View File

@ -1,17 +0,0 @@
To use the TOPS or Stripmap stack processors you need to:
1- Install ISCE as usual
2- Depending on which stack processor you need to try, add the path of the folder containing the python scripts to your $PATH environment variable as follows:
- to use the topsStack for processing a stack of Sentinel-1 tops data add the full path of your "contrib/stack/topsStack" to your $PATH environment variable
- to use the stripmapStack for processing a stack of Stripmap data, add the full path of your "contrib/stack/stripmapStack" to your $PATH environment variable
NOTE:
The stack processors do not show up in the install directory of your isce software. They can be found in the isce source directory.
Important Note:
There might be conflicts between topsStack and stripmapStack scripts (due to common names of different scripts). Therefore users MUST only have the path of one stack processor in their $PATH environment at a time, to avoid conflicts between the two stack processors.

34
contrib/stack/README.md Normal file
View File

@ -0,0 +1,34 @@
## Stack Processors
Read the document for each stack processor for details.
+ [stripmapStack](./stripmapStack/README.md)
+ [topsStack](./topsStack/README.md)
### Installation
To use the TOPS or Stripmap stack processors you need to:
1. Install ISCE as usual
2. Depending on which stack processor you need to try, add the path of the folder containing the python scripts to your `$PATH` environment variable as follows:
- add the full path of your **contrib/stack/topsStack** to `$PATH` to use the topsStack for processing a stack of Sentinel-1 TOPS data
- add the full path of your **contrib/stack/stripmapStack** to `$PATH` to use the stripmapStack for processing a stack of StripMap data
Note: The stack processors do not show up in the install directory of your isce software. They can be found in the isce source directory.
#### Important Note: ####
There might be conflicts between topsStack and stripmapStack scripts (due to common names of different scripts). Therefore users **MUST only** have the path of **one stack processor in their $PATH environment at a time**, to avoid conflicts between the two stack processors.
### References
Users who use the stack processors may refer to the following literatures:
For StripMap stack processor and ionospheric phase estimation:
+ H. Fattahi, M. Simons, and P. Agram, "InSAR Time-Series Estimation of the Ionospheric Phase Delay: An Extension of the Split Range-Spectrum Technique", IEEE Trans. Geosci. Remote Sens., vol. 55, no. 10, 5984-5996, 2017. (https://ieeexplore.ieee.org/abstract/document/7987747/)
For TOPS stack processing:
+ H. Fattahi, P. Agram, and M. Simons, “A network-based enhanced spectral diversity approach for TOPS time-series analysis,” IEEE Trans. Geosci. Remote Sens., vol. 55, no. 2, pp. 777-786, Feb. 2017. (https://ieeexplore.ieee.org/abstract/document/7637021/)

View File

@ -1,64 +0,0 @@
The detailed algorithms for stack processing of stripmap data can be found here:
H. Fattahi, M. Simons, and P. Agram, "InSAR Time-Series Estimation of the Ionospheric Phase Delay: An Extension of the Split Range-Spectrum Technique", IEEE Trans. Geosci. Remote Sens., vol. 55, no. 10, 5984-5996, 2017. (https://ieeexplore.ieee.org/abstract/document/7987747/)
-----------------------------------
Notes on stripmap stack processor:
Here are some notes to get started with processing stacks of stripmap data with ISCE.
1- create a folder somewhere for your project
mkdir MauleT111
cd MauleT111
2- create a DEM:
dem.py -a stitch -b -37 -31 -72 -69 -r -s 1 -c
3- Keep only ".dem.wgs84", ".dem.wgs84.vrt" and ".dem.wgs84.xml" and remove unnecessary files
4- fix the path of the file in the xml file of the DEM by using this command:
fixImageXml.py -f -i demLat_S37_S31_Lon_W072_W069.dem.wgs84
5- create a folder to download the ALOS-1 data from ASF:
mkdir download
cd download
6- Download the data that you want to process to the download directory.
7- once all data have been downloaded, we need to unzip them and move them to different folders and getting ready for unpacking and then SLC generation.
This can be done by running the following command in a directory above "download":
prepRawALOS.py -i download/ -o SLC
This command generates an empty SLC folder and a run file called: "run_unPackALOS".
You could also run prepRawSensor.py which aims to recognize the sensor data automatically followed by running the sensor specific preparation script. For now we include support for ALOS and CSK raw data, but it is trivial to expand and include other sensors as unpacking routines are already included in the distribution.
prepRawSensor.py -i download/ -o SLC
8- execute the commands inside run_unPackALOS file. If you have a cluster that you can submit jobs, you can submit each line of command to a processor. The commands are independent and can be run in parallel.
9- After successfully running the previous step, you should see acquisition dates in the SLC folder and the ".raw" files for each acquisition
Note: For ALOS-1, If there is an acquisition that does not include .raw file, this is most likely due to PRF change between frames and can not be currently handled by ISCE. You have to ignore those.
10- run stackStripmap.py which will generate many config and run files that need to be executed. Here is an example:
stackStripMap.py -s SLC/ -d demLat_S37_S31_Lon_W072_W069.dem.wgs84 -t 250 -b 1000 -a 14 -r 4 -u snaphu
This will produce:
a) baseline folder, which contains baseline information
b) pairs.png which is a baseline-time plot of the network of interferograms
c) configs: which contains the configuration parameter to run different InSAR processing steps
d) run_files: a folder that includes several run and job files that needs to be run in order
11- execute the commands in run files (run_1, run_2, etc) in the run_files folder

View File

@ -1,117 +0,0 @@
The detailed algorithm for stack processing of TOPS data can be find here:
H. Fattahi, P. Agram, and M. Simons, “A network-based enhanced spectral diversity approach for TOPS time-series analysis,” IEEE Trans. Geosci. Remote Sens., vol. 55, no. 2, pp. 777-786, Feb. 2017. (https://ieeexplore.ieee.org/abstract/document/7637021/)
<<<<<< Sentinel-1 TOPS stack processor >>>>>>
To use the sentinel stack processor, make sure to add the path of your "contrib/stack/topsStack" folder to your $PATH environment variable.
The scripts provides support for Sentinel-1 TOPS stack processing. Currently supported workflows include a coregistered stack of SLC, interferograms, offsets, and coherence.
stackSentinel.py generates all configuration and run files required to be executed on a stack of Sentinel-1 TOPS data. When stackSentinel.py is executed for a given workflow (-W option) a “configs” and “run_files” folder is generated. No processing is performed at this stage. Within the “run_files” folder different run_#_description files are contained which are to be executed as shell scripts in the run number order. Each of these run scripts call specific configure files contained in the “configs” folder which call ISCE in a modular fashion. The configure and run files will change depending on the selected workflow. To make run_# files executable, change the file permission accordingly (e.g., chmod +x run_1_unpack_slc).
To see workflow examples, type “stackSentinel.py -H”
To get an overview of all the configurable parameters, type “stackSentinel.py -h”
Required parameters of stackSentinel.py include:
-s SLC_DIRNAME A folder with downloaded Sentinel-1 SLCs.
-o ORBIT_DIRNAME A folder containing the Sentinel-1 orbits.
Missing orbit files will be downloaded automatically
-a AUX_DIRNAME A folder containing the Sentinel-1 Auxiliary files
-d DEM A DEM (Digital Elevation Model) referenced to wgs84
In the following, different workflow examples are provided. Note that stackSentinel.py only generates the run and configure files. To perform the actual processing, the user will need to execute each run file in their numbered order.
In all workflows, coregistration (-C option) can be done using only geometry (set option = geometry) or with geometry plus refined azimuth offsets through NESD (set option = NESD) approach, the latter being the default. For the NESD coregistration the user can control the ESD coherence threshold (-e option) and the number of overlap interferograms (-O) to be used in NESD estimation.
------------------------------ Example 1: Coregistered stack of SLC ----------------------------
Generate the run and configure files needed to generate a coregistered stack of SLCs.
In this example, a pre-defined bounding box is specified. Note, if the bounding box is not provided it is set by default to the common SLC area among all SLCs. We recommend that user always set the processing bounding box. Since ESA does not have a fixed frame definition, we suggest to download data for a larger bounding box compared to the actual bounding box used in stackSentinel.py. This way user can ensure to have required data to cover the region of interest. Here is an example command to create configuration files for a stack of SLCs:
stackSentinel.py -s ../SLC/ -d ../DEM/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -a ../../AuxDir/ -o ../../Orbits -b '19 20 -99.5 -98.5' -W slc
by running the command above, the configs and run_files folders are created. User needs to execute each run file in order. The order is specified by the index number of the run file name. For the example above, the run_files folder includes the following files:
- run_1_unpack_slc_topo_master
- run_2_average_baseline
- run_3_extract_burst_overlaps
- run_4_overlap_geo2rdr_resample
- run_5_pairs_misreg
- run_6_timeseries_misreg
- run_7_geo2rdr_resample
- run_8_extract_stack_valid_region
- run_9_merge
- run_10_grid_baseline
The generated run files are self descriptive. Below is a short explanation on what each run_file does:
***run_1_unpack_slc_topo_master:***
Includes commands to unpack Sentinel-1 TOPS SLCs using ISCE readers. For older SLCs which need antenna elevation pattern correction, the file is extracted and written to disk. For newer versions of SLCs which don't need the elevation antenna pattern correction, only a gdal virtual “vrt” file (and isce xml file) is generated. The “.vrt” file points to the Sentinel SLC file and reads them whenever required during the processing. If a user wants to write the “.vrt” SLC file to disk, it can be done easily using gdal_translate (e.g. gdal_translate of ENVI File.vrt File.slc).
The “run_1_unpack_slc_topo_master” also includes a command that refers to the config file of the stack master, which includes configuration for running topo for the stack master. Note that in the pair-wise processing strategy one should run topo (mapping from range-Doppler to geo coordinate) for all pairs. However, with stackSentinel, topo needs to be run only one time for the master in the stack.
***run_2_average_baseline: ***
Computes average baseline for the stack. These baselines are not used for processing anywhere. They are only an approximation and can be used for plotting purposes. A more precise baseline grid is estimated later in run_10.
***run_3_extract_burst_overlaps: ***
Burst overlaps are extracted for estimating azimuth misregistration using NESD technique. If coregistration method is chosen to be “geometry”, then this run file won't exist and the overlaps are not extracted.
***run_4_overlap_geo2rdr_resample: ***
Running geo2rdr to estimate geometrical offsets between slave burst overlaps and the stack master burst overlaps. The slave burst overlaps are then resampled to the stack master burst overlaps.
***run_5_pairs_misreg: ***
Using the coregistered stack burst overlaps generated from the previous step, differential overlap interferograms are generated and are used for estimating azimuth misregistration using Enhanced Spectral Diversity (ESD) technique.
***run_6_timeseries_misreg: ***
A time-series of azimuth and range misregistration is estimated with respect to the stack master. The time-series is a least squares estimation from the pair misregistration from the previous step.
***run_7_geo2rdr_resample: ***
Using orbit and DEM, geometrical offsets among all slave SLCs and the stack master are computed. The geometrical offsets, together with the misregistration time-series (from previous step) are used for precise coregistration of each burst SLC.
***run_8_extract_stack_valid_region: ***
The valid region between burst SLCs at the overlap area of the bursts slightly changes for different acquisitions. Therefore we need to keep track of these overlaps which will be used during merging bursts. Without these knowledge, lines of invalid data may appear in the merged products at the burst overlaps.
***run_9_merge: ***
Merges all bursts for the master and coregistered SLCs. The geometry files are also merged including longitude, latitude, shadow and layer mask, line-of-sight files, etc. .
***run_10_grid_baseline: ***
A coarse grid of baselines between each slave SLC and the stack master is generated. This is not used in any computation.
-------- Example 2: Coregistered stack of SLC with modified parameters -----------
In the following example, the same stack generation is requested but where the threshold of the default coregistration approach (NESD) is relaxed from its default value of 0.85 to 0.7.
stackSentinel.py -s ../SLC/ -d ../DEM/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -a ../../AuxDir/ -o ../../Orbits -b '19 20 -99.5 -98.5' -W slc -e 0.7
When running all the run files, the final products are located in the merge folder which has subdirectories “geom_master”, “baselines” and “SLC”. The “geom_master” folder contains geometry products such as longitude, latitude, height, local incidence angle, look angle, heading, and shadowing/layover mask files. The “baselines” folder contains sparse grids of the perpendicular baseline for each acquisition, while the “SLC” folder contains the coregistered SLCs
------------------------------ Example 3: Stack of interferograms ------------------------------
Generate the run and configure files needed to generate a stack of interferograms.
In this example, a stack of interferograms is requested for which up to 2 nearest neighbor connections are included.
stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -c 2
In the following example, all possible interferograms are being generated and in which the coregistration approach is set to use geometry and not the default NESD.
stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -C geometry -c all
When executing all the run files, a coregistered stack of slcs are produced, the burst interferograms are generated and then merged. Merged interferograms are multilooked, filtered and unwrapped. Geocoding is not applied. If users need to geocode any product, they can use the geocodeGdal.py script.
-------------------- Example 4: Correlation stack example ----------------------------
Generate the run and configure files needed to generate a stack of coherence.
In this example, a correlation stack is requested considering all possible coherence pairs and where the coregistration approach is done using geometry only.
stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -C geometry -c all -W correlation
This workflow is basically similar to the previous one. The difference is that the interferograms are not unwrapped.
----------------------------------- DEM download example -----------------------------------
Download of DEM (need to use wgs84 version) using the ISCE DEM download script.
dem.py -a stitch -b 18 20 -100 -97 -r -s 1 c
Updating DEMs wgs84 xml to include full path to the DEM
fixImageXml.py -f -i demLat_N18_N20_Lon_W100_W097.dem.wgs84

View File

@ -1,11 +0,0 @@
Users who use the stack processors may refer to the following literatures:
for stripmap stack processor and ionospheric phase estimation:
H. Fattahi, M. Simons, and P. Agram, "InSAR Time-Series Estimation of the Ionospheric Phase Delay: An Extension of the Split Range-Spectrum Technique", IEEE Trans. Geosci. Remote Sens., vol. 55, no. 10, 5984-5996, 2017. (https://ieeexplore.ieee.org/abstract/document/7987747/)
For TOPS stack processing:
H. Fattahi, P. Agram, and M. Simons, “A network-based enhanced spectral diversity approach for TOPS time-series analysis,” IEEE Trans. Geosci. Remote Sens., vol. 55, no. 2, pp. 777-786, Feb. 2017. (https://ieeexplore.ieee.org/abstract/document/7637021/)

View File

@ -0,0 +1,85 @@
## StripMap stack processor
The detailed algorithms and workflow for stack processing of stripmap SAR data can be found here:
+ Fattahi, H., M. Simons, and P. Agram (2017), InSAR Time-Series Estimation of the Ionospheric Phase Delay: An Extension of the Split Range-Spectrum Technique, IEEE Transactions on Geoscience and Remote Sensing, 55(10), 5984-5996, doi:[10.1109/TGRS.2017.2718566](https://ieeexplore.ieee.org/abstract/document/7987747/).
-----------------------------------
To use the stripmap stack processor, make sure to add the path of your `contrib/stack/stripmapStack` folder to your `$PATH` environment variable.
Currently supported workflows include a coregistered stack of SLC, interferograms, ionospheric delays.
Here are some notes to get started with processing stacks of stripmap data with ISCE.
#### 1. Create your project folder somewhere
```
mkdir MauleAlosDT111
cd MauleAlosDT111
```
#### 2. Prepare DEM
a) create a folder for DEM;
b) create a DEM using dem.py with SNWE of your study area in integer;
c) Keep only ".dem.wgs84", ".dem.wgs84.vrt" and ".dem.wgs84.xml" and remove unnecessary files;
d) fix the path of the file in the xml file of the DEM by using fixImageXml.py.
```
mkdir DEM; cd DEM
dem.py -a stitch -b -37 -31 -72 -69 -r -s 1 -c
rm demLat*.dem demLat*.dem.xml demLat*.dem.vrt
fixImageXml.py -f -i demLat*.dem.wgs84
cd ..
```
#### 3. Download data
##### 3.1 create a folder to download SAR data (i.e. ALOS-1 data from ASF)
```
mkdir download
cd download
```
##### 3.2 Download the data that you want to process to the "download" directory
#### 4. Prepare SAR data
Once all data have been downloaded, we need to unzip them and move them to different folders and getting ready for unpacking and then SLC generation. This can be done by running the following command in a directory above "download":
```
prepRawALOS.py -i download/ -o SLC
```
This command generates an empty SLC folder and a run file called: "run_unPackALOS".
You could also run prepRawSensor.py which aims to recognize the sensor data automatically followed by running the sensor specific preparation script. For now we include support for ALOS and CSK raw data, but it is trivial to expand and include other sensors as unpacking routines are already included in the distribution.
```
prepRawSensor.py -i download/ -o SLC
```
#### 5. Execute the commands in "run_unPackALOS" file
If you have a cluster that you can submit jobs, you can submit each line of command to a processor. The commands are independent and can be run in parallel.
After successfully running the previous step, you should see acquisition dates in the SLC folder and the ".raw" files for each acquisition.
Note: For ALOS-1, If there is an acquisition that does not include .raw file, this is most likely due to PRF change between frames and can not be currently handled by ISCE. You have to ignore those.
#### 6. Run "stackStripmap.py"
This will generate many config and run files that need to be executed. Here is an example:
```
stackStripMap.py -s SLC/ -d DEM/demLat*.dem.wgs84 -t 250 -b 1000 -a 14 -r 4 -u snaphu
```
This will produce:
a) baseline folder, which contains baseline information
b) pairs.png which is a baseline-time plot of the network of interferograms
c) configs: which contains the configuration parameter to run different InSAR processing steps
d) run_files: a folder that includes several run and job files that needs to be run in order
#### 7. Execute the commands in run files (run_1*, run_2*, etc) in the "run_files" folder

View File

@ -1,64 +0,0 @@
The detailed algorithms for stack processing of stripmap data can be found here:
H. Fattahi, M. Simons, and P. Agram, "InSAR Time-Series Estimation of the Ionospheric Phase Delay: An Extension of the Split Range-Spectrum Technique", IEEE Trans. Geosci. Remote Sens., vol. 55, no. 10, 5984-5996, 2017. (https://ieeexplore.ieee.org/abstract/document/7987747/)
-----------------------------------
Notes on stripmap stack processor:
Here are some notes to get started with processing stacks of stripmap data with ISCE.
1- create a folder somewhere for your project
mkdir MauleT111
cd MauleT111
2- create a DEM:
dem.py -a stitch -b -37 -31 -72 -69 -r -s 1 -c
3- Keep only ".dem.wgs84", ".dem.wgs84.vrt" and ".dem.wgs84.xml" and remove unnecessary files
4- fix the path of the file in the xml file of the DEM by using this command:
fixImageXml.py -f -i demLat_S37_S31_Lon_W072_W069.dem.wgs84
5- create a folder to download the ALOS-1 data from ASF:
mkdir download
cd download
6- Download the data that you want to process to the download directory.
7- once all data have been downloaded, we need to unzip them and move them to different folders and getting ready for unpacking and then SLC generation.
This can be done by running the following command in a directory above "download":
prepRawALOS.py -i download/ -o SLC
This command generates an empty SLC folder and a run file called: "run_unPackALOS".
You could also run prepRawSensor.py which aims to recognize the sensor data automatically followed by running the sensor specific preparation script. For now we include support for ALOS and CSK raw data, but it is trivial to expand and include other sensors as unpacking routines are already included in the distribution.
prepRawSensor.py -i download/ -o SLC
8- execute the commands inside run_unPackALOS file. If you have a cluster that you can submit jobs, you can submit each line of command to a processor. The commands are independent and can be run in parallel.
9- After successfully running the previous step, you should see acquisition dates in the SLC folder and the ".raw" files for each acquisition
Note: For ALOS-1, If there is an acquisition that does not include .raw file, this is most likely due to PRF change between frames and can not be currently handled by ISCE. You have to ignore those.
10- run stackStripmap.py which will generate many config and run files that need to be executed. Here is an example:
stackStripMap.py -s SLC/ -d demLat_S37_S31_Lon_W072_W069.dem.wgs84 -t 250 -b 1000 -a 14 -r 4 -u snaphu
This will produce:
a) baseline folder, which contains baseline information
b) pairs.png which is a baseline-time plot of the network of interferograms
c) configs: which contains the configuration parameter to run different InSAR processing steps
d) run_files: a folder that includes several run and job files that needs to be run in order
11- execute the commands in run files (run_1, run_2, etc) in the run_files folder

View File

@ -65,6 +65,8 @@ class config(object):
self.f.write('master : ' + self.slcDir +'\n')
self.f.write('dem : ' + self.dem +'\n')
self.f.write('output : ' + self.geometryDir +'\n')
self.f.write('alks : ' + self.alks +'\n')
self.f.write('rlks : ' + self.rlks +'\n')
if self.nativeDoppler:
self.f.write('native : True\n')
if self.useGPU:
@ -72,7 +74,18 @@ class config(object):
else:
self.f.write('useGPU : False\n')
self.f.write('##########################'+'\n')
def createWaterMask(self, function):
self.f.write('##########################'+'\n')
self.f.write(function+'\n')
self.f.write('createWaterMask : '+'\n')
self.f.write('dem_file : ' + self.dem +'\n')
self.f.write('lat_file : ' + self.latFile +'\n')
self.f.write('lon_file : ' + self.lonFile +'\n')
self.f.write('output : ' + self.waterMaskFile + '\n')
self.f.write('##########################'+'\n')
def geo2rdr(self, function):
self.f.write('##########################'+'\n')
@ -309,8 +322,7 @@ class run(object):
self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n')
def master_focus_split_geometry(self, stackMaster, config_prefix, split=False, focus=True, native=True):
########
# focusing master and producing geometry files
"""focusing master and producing geometry files"""
configName = os.path.join(self.configDir, config_prefix + stackMaster)
configObj = config(configName)
configObj.configure(self)
@ -327,11 +339,19 @@ class run(object):
counter += 1
if split:
configObj.slc = os.path.join(configObj.slcDir,stackMaster+self.raw_string+'.slc')
configObj.outDir = configObj.slcDir
configObj.shelve = os.path.join(configObj.slcDir, 'data')
configObj.splitRangeSpectrum('[Function-{0}]'.format(counter))
configObj.slc = os.path.join(configObj.slcDir,stackMaster+self.raw_string+'.slc')
configObj.outDir = configObj.slcDir
configObj.shelve = os.path.join(configObj.slcDir, 'data')
configObj.splitRangeSpectrum('[Function-{0}]'.format(counter))
counter += 1
# generate water mask in radar coordinates
configObj.latFile = os.path.join(self.workDir, 'geom_master/lat.rdr')
configObj.lonFile = os.path.join(self.workDir, 'geom_master/lon.rdr')
configObj.waterMaskFile = os.path.join(self.workDir, 'geom_master/waterMask.rdr')
configObj.createWaterMask('[Function-{0}]'.format(counter))
counter += 1
configObj.finalize()
del configObj
self.runf.write(self.text_cmd+'stripmapWrapper.py -c '+ configName+'\n')

View File

@ -2,30 +2,45 @@
#Author: Heresh Fattahi
import isce
import isceobj
from contrib.demUtils.SWBDStitcher import SWBDStitcher
from iscesys.DataManager import createManager
import os
import argparse
import configparser
from numpy import round
import numpy as np
import isce
import isceobj
from iscesys.DataManager import createManager
from contrib.demUtils.SWBDStitcher import SWBDStitcher
EXAMPLE = """example:
createWaterMask.py -b 31 33 130 132
createWaterMask.py -b 31 33 130 132 -l lat.rdr -L lon.rdr -o waterMask.rdr
createWaterMask.py -d ../DEM/demLat_N31_N33_Lon_E130_E132.dem.wgs84 -l lat.rdr -L lon.rdr -o waterMask.rdr
"""
def createParser():
'''
Create command line parser.
'''
parser = argparse.ArgumentParser( description='extracts the overlap geometry between master bursts')
# parser.add_argument('-b', '--bbox', dest='bbox', type=str, default=None,
# help='Lat/Lon Bounding SNWE')
parser.add_argument('-b', '--bbox', type = int, default = None, nargs = '+', dest = 'bbox', help = 'Defines the spatial region in the format south north west east.\
The values should be integers from (-90,90) for latitudes and (0,360) or (-180,180) for longitudes.')
parser = argparse.ArgumentParser(description='Create water body mask in geo and/or radar coordinates',
formatter_class=argparse.RawTextHelpFormatter,
epilog=EXAMPLE)
parser.add_argument('-b', '--bbox', dest='bbox', type=int, default=None, nargs=4, metavar=('S','N','W','E'),
help = 'Defines the spatial region in the format south north west east.\n'
'The values should be integers from (-90,90) for latitudes '
'and (0,360) or (-180,180) for longitudes.')
parser.add_argument('-d','--dem_file', dest='demName', type=str, default=None,
help='DEM file in geo coordinates, i.e. demLat*.dem.wgs84.')
parser.add_argument('-l', '--lat_file', dest='latName', type=str, default=None,
help='pixel by pixel lat file in radar coordinate')
parser.add_argument('-L', '--lon_file', dest='lonName', type=str, default=None,
help='pixel by pixel lat file in radar coordinate')
parser.add_argument('-o', '--output', dest='outfile', type=str,
help='output filename of water mask in radar coordinates')
return parser
def cmdLineParse(iargs = None):
'''
Command line parser.
@ -33,37 +48,69 @@ def cmdLineParse(iargs = None):
parser = createParser()
inps = parser.parse_args(args=iargs)
#inps.bbox = [int(round(val)) for val in inps.bbox.split()]
if not inps.bbox and not inps.demName:
parser.print_usage()
raise SystemExit('ERROR: no --bbox/--dem_file input, at least one is required.')
if not inps.outfile and (inps.latName and inps.lonName):
inps.outfile = os.path.join(os.path.dirname(inps.latName), 'waterMask.rdr')
return inps
def download_waterMask(inps):
def dem2bbox(dem_file):
    """Derive an integer SNWE bounding box from a DEM file in geo coordinates.

    Reads the ISCE metadata (<dem_file>.xml) and returns
    [south, north, west, east] rounded outward to whole degrees.
    """
    dem = isceobj.createDemImage()
    dem.load(dem_file + '.xml')
    dem.setAccessMode('read')
    # first lat/lon is the upper-left (north-west) corner
    north = dem.getFirstLatitude()
    west = dem.getFirstLongitude()
    south = north + dem.getDeltaLatitude() * dem.getLength()
    east = west + dem.getDeltaLongitude() * dem.getWidth()
    return [np.floor(south).astype(int), np.ceil(north).astype(int),
            np.floor(west).astype(int), np.ceil(east).astype(int)]
def download_waterMask(bbox, dem_file):
out_dir = os.getcwd()
# update out_dir and/or bbox if dem_file is input
if dem_file:
out_dir = os.path.dirname(dem_file)
if not bbox:
bbox = dem2bbox(dem_file)
sw = createManager('wbd')
sw.configure()
inps.waterBodyGeo = sw.defaultName(inps.bbox)
#inps.waterBodyGeo = sw.defaultName(inps.bbox)
sw.outputFile = os.path.join(out_dir, sw.defaultName(bbox))
sw._noFilling = False
#sw._fillingValue = -1.0
sw._fillingValue = 0.0
sw.stitch(inps.bbox[0:2],inps.bbox[2:])
sw._fillingValue = -1.0 #fill pixels without DEM data with value of -1, same as water body
#sw._fillingValue = 0.0
sw.stitch(bbox[0:2], bbox[2:])
return sw.outputFile
return inps
def geo2radar(inps):
inps.waterBodyRadar = inps.waterBodyGeo + '.rdr'
def geo2radar(geo_file, rdr_file, lat_file, lon_file):
    """Resample the geo-coordinate water mask into radar coordinates.

    Uses the pixel-by-pixel lat/lon layers to map geo_file onto the radar
    grid and writes the result to rdr_file, which is returned.
    """
    stitcher = SWBDStitcher()
    stitcher.toRadar(geo_file, lat_file, lon_file, rdr_file)
    return rdr_file
#looks.py -i watermask.msk -r 4 -a 14 -o 'waterMask.14alks_4rlks.msk'
#imageMath.py -e='a*b' --a=filt_20100911_20101027.int --b=watermask.14alks_4rlks.msk -o filt_20100911_20101027_masked.int -t cfloat -s BIL
def main(iargs=None):
inps = cmdLineParse(iargs)
inps = download_waterMask(inps)
if inps.latName and inps.lonName:
inps = geo2radar(inps)
inps = cmdLineParse(iargs)
geo_file = download_waterMask(inps.bbox, inps.demName)
if inps.latName and inps.lonName:
geo2radar(geo_file, inps.outfile, inps.latName, inps.lonName)
return
if __name__ == '__main__' :
'''

View File

@ -5,7 +5,7 @@
import os, imp, sys, glob
import argparse
import configparser
import datetime
import datetime
import numpy as np
import shelve
import isce
@ -20,7 +20,7 @@ defoMax = '2'
maxNodes = 72
def createParser():
parser = argparse.ArgumentParser( description='Preparing the directory structure and config files for stack processing of Sentinel data')
parser = argparse.ArgumentParser( description='Preparing the directory structure and config files for stack processing of StripMap data')
parser.add_argument('-s', '--slc_directory', dest='slcDir', type=str, required=True,
help='Directory with all stripmap SLCs')
@ -31,7 +31,7 @@ def createParser():
help='Working directory ')
parser.add_argument('-d', '--dem', dest='dem', type=str, required=True,
help='Directory with the DEM (.xml and .vrt files)')
help='DEM file (with .xml and .vrt files)')
parser.add_argument('-m', '--master_date', dest='masterDate', type=str, default=None,
help='Directory with master acquisition')
@ -43,47 +43,54 @@ def createParser():
help='Baseline threshold (max bperp in meters)')
parser.add_argument('-a', '--azimuth_looks', dest='alks', type=str, default='10',
help='Number of looks in azimuth (automaticly computed as AspectR*looks when "S" or "sensor" is defined to give approximately square multi-look pixels)')
help='Number of looks in azimuth (automaticly computed as AspectR*looks when '
'"S" or "sensor" is defined to give approximately square multi-look pixels)')
parser.add_argument('-r', '--range_looks', dest='rlks', type=str, default='10',
help='Number of looks in range')
parser.add_argument('-S', '--sensor', dest='sensor', type=str, required=False,
help='SAR sensor used to define square multi-look pixels')
parser.add_argument('-L', '--low_band_frequency', dest='fL', type=str, default=None,
help='low band frequency')
parser.add_argument('-H', '--high_band_frequency', dest='fH', type=str, default=None,
help='high band frequency')
parser.add_argument('-B', '--subband_bandwidth ', dest='bandWidth', type=str, default=None,
help='sub-band band width')
parser.add_argument('-u', '--unw_method', dest='unwMethod', type=str, default='snaphu'
, help='unwrapping method (icu, snaphu, or snaphu2stage)')
parser.add_argument('-u', '--unw_method', dest='unwMethod', type=str, default='snaphu',
help='unwrapping method (icu, snaphu, or snaphu2stage)')
parser.add_argument('-f','--filter_strength', dest='filtStrength', type=str, default=filtStrength,
help='strength of Goldstein filter applied to the wrapped phase before spatial coherence estimation.'
' Default: {}'.format(filtStrength))
parser.add_argument('--filter_sigma_x', dest='filterSigmaX', type=str, default='100'
, help='filter sigma for gaussian filtering the dispersive and nonDispersive phase')
iono = parser.add_argument_group('Ionosphere', 'Configurationas for ionospheric correction')
iono.add_argument('-L', '--low_band_frequency', dest='fL', type=str, default=None,
help='low band frequency')
iono.add_argument('-H', '--high_band_frequency', dest='fH', type=str, default=None,
help='high band frequency')
iono.add_argument('-B', '--subband_bandwidth ', dest='bandWidth', type=str, default=None,
help='sub-band band width')
parser.add_argument('--filter_sigma_y', dest='filterSigmaY', type=str, default='100.0',
help='sigma of the gaussian filter in Y direction, default=100')
iono.add_argument('--filter_sigma_x', dest='filterSigmaX', type=str, default='100',
help='filter sigma for gaussian filtering the dispersive and nonDispersive phase')
parser.add_argument('--filter_size_x', dest='filterSizeX', type=str, default='800.0',
help='size of the gaussian kernel in X direction, default = 800')
iono.add_argument('--filter_sigma_y', dest='filterSigmaY', type=str, default='100.0',
help='sigma of the gaussian filter in Y direction, default=100')
parser.add_argument('--filter_size_y', dest='filterSizeY', type=str, default='800.0',
help='size of the gaussian kernel in Y direction, default=800')
iono.add_argument('--filter_size_x', dest='filterSizeX', type=str, default='800.0',
help='size of the gaussian kernel in X direction, default = 800')
parser.add_argument('--filter_kernel_rotation', dest='filterKernelRotation', type=str, default='0.0',
help='rotation angle of the filter kernel in degrees (default = 0.0)')
iono.add_argument('--filter_size_y', dest='filterSizeY', type=str, default='800.0',
help='size of the gaussian kernel in Y direction, default=800')
parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='slc'
, help='The InSAR processing workflow : (slc, interferogram, ionosphere)')
iono.add_argument('--filter_kernel_rotation', dest='filterKernelRotation', type=str, default='0.0',
help='rotation angle of the filter kernel in degrees (default = 0.0)')
parser.add_argument('-z', '--zero', dest='zerodop', action='store_true', default=False, help='Use zero doppler geometry for processing - Default : No')
parser.add_argument('--nofocus', dest='nofocus', action='store_true', default=False, help='If input data is already focused to SLCs - Default : do focus')
parser.add_argument('-c', '--text_cmd', dest='text_cmd', type=str, default=''
, help='text command to be added to the beginning of each line of the run files. Example : source ~/.bash_profile;')
parser.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False, help='Allow App to use GPU when available')
parser.add_argument('-W', '--workflow', dest='workflow', type=str, default='slc',
help='The InSAR processing workflow : (slc, interferogram, ionosphere)')
parser.add_argument('-z', '--zero', dest='zerodop', action='store_true', default=False,
help='Use zero doppler geometry for processing - Default : No')
parser.add_argument('--nofocus', dest='nofocus', action='store_true', default=False,
help='If input data is already focused to SLCs - Default : do focus')
parser.add_argument('-c', '--text_cmd', dest='text_cmd', type=str, default='',
help='text command to be added to the beginning of each line of the run files. Example : source ~/.bash_profile;')
parser.add_argument('-useGPU', '--useGPU', dest='useGPU',action='store_true', default=False,
help='Allow App to use GPU when available')
parser.add_argument('--summary', dest='summary', action='store_true', default=False, help='Show summary only')
return parser

View File

@ -1,13 +1,16 @@
#!/usr/bin/env python3
import os
import argparse
import shelve
import datetime
import shutil
import numpy as np
import isce
import isceobj
import numpy as np
import shelve
import os
import datetime
from isceobj.Constants import SPEED_OF_LIGHT
from isceobj.Util.Poly2D import Poly2D
from mroipac.looks.Looks import Looks
def createParser():
'''
@ -45,7 +48,7 @@ class Dummy(object):
def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
from isceobj.Planet.Planet import Planet
from zerodop.GPUtopozero.GPUtopozero import PyTopozero
from isceobj import Constants as CN
@ -81,14 +84,14 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
omethod = 2 # LEGENDRE INTERPOLATION
else:
omethod = 0 # HERMITE INTERPOLATION
# tracking doppler specifications
if nativedop and (dop is not None):
try:
coeffs = dop._coeffs
except:
coeffs = dop
polyDoppler = Poly2D()
polyDoppler.setWidth(width)
polyDoppler.setLength(length)
@ -106,10 +109,10 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
polyDoppler.createPoly2D()
# dem
# dem
demImage.setCaster('read','FLOAT')
demImage.createImage()
# slant range file
slantRangeImage = Poly2D()
slantRangeImage.setWidth(width)
@ -127,12 +130,12 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
dataType = 'DOUBLE'
latImage.initImage(latFilename,accessMode,width,dataType)
latImage.createImage()
# lon file
lonImage = isceobj.createImage()
lonImage.initImage(lonFilename,accessMode,width,dataType)
lonImage.createImage()
# LOS file
losImage = isceobj.createImage()
dataType = 'FLOAT'
@ -141,7 +144,7 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
losImage.initImage(losFilename,accessMode,width,dataType,bands=bands,scheme=scheme)
losImage.setCaster('write','DOUBLE')
losImage.createImage()
# height file
heightImage = isceobj.createImage()
dataType = 'DOUBLE'
@ -155,7 +158,7 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
incImage.initImage(incFilename,accessMode,width,dataType,bands=bands,scheme=scheme)
incImage.createImage()
incImagePtr = incImage.getImagePointer()
maskImage = isceobj.createImage()
dataType = 'BYTE'
bands = 1
@ -165,7 +168,7 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
else:
incImagePtr = 0
maskImagePtr = 0
# initalize planet
elp = Planet(pname='Earth').ellipsoid
@ -211,14 +214,14 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
topo.set_orbitBasis(1) # Is this ever different?
topo.createOrbit() # Initializes the empty orbit to the right allocated size
count = 0
for sv in info.orbit.stateVectors.list:
td = DTU.seconds_since_midnight(sv.getTime())
pos = sv.getPosition()
vel = sv.getVelocity()
topo.set_orbitVector(count,td,pos[0],pos[1],pos[2],vel[0],vel[1],vel[2])
count += 1
# run topo
topo.runTopo()
@ -241,13 +244,13 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
# los file
descr = '''Two channel Line-Of-Sight geometry image (all angles in degrees). Represents vector drawn from target to platform.
Channel 1: Incidence angle measured from vertical at target (always +ve).
Channel 1: Incidence angle measured from vertical at target (always +ve).
Channel 2: Azimuth angle measured from North in Anti-clockwise direction.'''
losImage.setImageType('bil')
losImage.addDescription(descr)
losImage.finalizeImage()
losImage.renderHdr()
# dem/ height file
demImage.finalizeImage()
@ -256,7 +259,7 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
descr = '''Two channel angle file.
Channel 1: Angle between ray to target and the vertical at the sensor
Channel 2: Local incidence angle accounting for DEM slope at target'''
incImage.addDescription(descr)
incImage.finalizeImage()
incImage.renderHdr()
@ -265,7 +268,7 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
maskImage.addDescription(descr)
maskImage.finalizeImage()
maskImage.renderHdr()
if slantRangeImage:
try:
slantRangeImage.finalizeImage()
@ -273,7 +276,7 @@ def runTopoGPU(info, demImage, dop=None, nativedop=False, legendre=False):
pass
def runTopoCPU(info, demImage, dop=None,
def runTopoCPU(info, demImage, dop=None,
nativedop=False, legendre=False):
from zerodop.topozero import createTopozero
from isceobj.Planet.Planet import Planet
@ -295,7 +298,7 @@ def runTopoCPU(info, demImage, dop=None,
topo.numberRangeLooks = info.numberRangeLooks
topo.numberAzimuthLooks = info.numberAzimuthLooks
topo.lookSide = info.lookSide
topo.sensingStart = info.sensingStart + datetime.timedelta(seconds = ((info.numberAzimuthLooks - 1) /2) / info.prf)
topo.sensingStart = info.sensingStart + datetime.timedelta(seconds = ((info.numberAzimuthLooks - 1) /2) / info.prf)
topo.rangeFirstSample = info.rangeFirstSample + ((info.numberRangeLooks - 1)/2) * info.slantRangePixelSpacing
topo.demInterpolationMethod='BIQUINTIC'
@ -328,9 +331,10 @@ def runTopoCPU(info, demImage, dop=None,
topo.topo()
return
def runSimamp(outdir, hname='z.rdr'):
from iscesys.StdOEL.StdOELPy import create_writer
#####Run simamp
stdWriter = create_writer("log","",True,filename='sim.log')
objShade = isceobj.createSimamplitude()
@ -354,6 +358,86 @@ def runSimamp(outdir, hname='z.rdr'):
simImage.renderHdr()
hgtImage.finalizeImage()
simImage.finalizeImage()
return
def runMultilook(in_dir, out_dir, alks, rlks):
    """Multilook the geometry layers found in in_dir into out_dir using isce Looks.

    alks/rlks are the number of looks in azimuth (down) and range (across).
    Missing layers are skipped silently. Returns out_dir.
    """
    print('generate multilooked geometry files with alks={} and rlks={}'.format(alks, rlks))
    from iscesys.Parsers.FileParserFactory import createFileParser
    parser = createFileParser('xml')

    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
        print('create directory: {}'.format(out_dir))

    for layer in ['hgt', 'incLocal', 'lat', 'lon', 'los', 'shadowMask', 'waterMask']:
        in_file = os.path.join(in_dir, '{}.rdr'.format(layer))
        out_file = os.path.join(out_dir, '{}.rdr'.format(layer))
        if not os.path.isfile(in_file):
            continue

        # DEM-typed layers need the dedicated DEM image reader
        meta = parser.parse(in_file + '.xml')[0]
        if 'image_type' in meta and meta['image_type'] == 'dem':
            img = isceobj.createDemImage()
        else:
            img = isceobj.createImage()
        img.load(in_file + '.xml')
        img.filename = in_file

        looker = Looks()
        looker.setDownLooks(alks)
        looker.setAcrossLooks(rlks)
        looker.setInputImage(img)
        looker.setOutputFilename(out_file)
        looker.looks()

        # copy the full resolution xml/vrt file from ./merged/geom_master to ./geom_master
        # to facilitate the number of looks extraction
        # the file path inside .xml file is not, but should, updated
        shutil.copy(in_file + '.xml', out_file + '.full.xml')
        shutil.copy(in_file + '.vrt', out_file + '.full.vrt')
    return out_dir
def runMultilookGdal(in_dir, out_dir, alks, rlks):
    """Multilook the geometry layers in in_dir into out_dir via gdal_translate.

    alks/rlks are the number of looks in azimuth (rows) and range (columns).
    Missing layers are skipped silently. Returns out_dir.
    """
    print('generate multilooked geometry files with alks={} and rlks={}'.format(alks, rlks))
    import gdal

    # create 'geom_master' directory
    if not os.path.isdir(out_dir):
        os.makedirs(out_dir)
        print('create directory: {}'.format(out_dir))

    # multilook files one by one
    for layer in ['hgt', 'incLocal', 'lat', 'lon', 'los', 'shadowMask', 'waterMask']:
        in_file = os.path.join(in_dir, '{}.rdr'.format(layer))
        out_file = os.path.join(out_dir, '{}.rdr'.format(layer))
        if not os.path.isfile(in_file):
            continue

        ds = gdal.Open(in_file, gdal.GA_ReadOnly)
        # output size is the integer number of whole look windows;
        # srcwin trims the input so it divides evenly
        out_wid = int(ds.RasterXSize / rlks)
        out_len = int(ds.RasterYSize / alks)
        src_wid = out_wid * rlks
        src_len = out_len * alks

        cmd = 'gdal_translate -of ENVI -a_nodata 0 -outsize {ox} {oy} '.format(ox=out_wid, oy=out_len)
        cmd += ' -srcwin 0 0 {sx} {sy} {fi} {fo} '.format(sx=src_wid, sy=src_len, fi=in_file, fo=out_file)
        print(cmd)
        os.system(cmd)

        # copy the full resolution xml/vrt file from ./merged/geom_master to ./geom_master
        # to facilitate the number of looks extraction
        # the file path inside .xml file is not, but should, updated
        shutil.copy(in_file + '.xml', out_file + '.full.xml')
        shutil.copy(in_file + '.vrt', out_file + '.full.vrt')
    return out_dir
def extractInfo(frame, inps):
@ -369,8 +453,8 @@ def extractInfo(frame, inps):
info.lookSide = frame.instrument.platform.pointingDirection
info.rangeFirstSample = frame.startingRange
info.numberRangeLooks = inps.rlks
info.numberAzimuthLooks = inps.alks
info.numberRangeLooks = 1 #inps.rlks
info.numberAzimuthLooks = 1 #inps.alks
fsamp = frame.rangeSamplingRate
@ -378,9 +462,9 @@ def extractInfo(frame, inps):
info.prf = frame.PRF
info.radarWavelength = frame.radarWavelegth
info.orbit = frame.getOrbit()
info.width = frame.getNumberOfSamples()
info.length = frame.getNumberOfLines()
info.width = frame.getNumberOfSamples()
info.length = frame.getNumberOfLines()
info.sensingStop = frame.getSensingStop()
info.outdir = inps.outdir
@ -389,7 +473,7 @@ def extractInfo(frame, inps):
def main(iargs=None):
inps = cmdLineParse(iargs)
# see if the user compiled isce with GPU enabled
@ -419,11 +503,9 @@ def main(iargs=None):
doppler = db['doppler']
except:
doppler = frame._dopplerVsPixel
db.close()
####Setup dem
demImage = isceobj.createDemImage()
demImage.load(inps.dem + '.xml')
@ -439,14 +521,20 @@ def main(iargs=None):
info.incFilename = os.path.join(info.outdir, 'incLocal.rdr')
info.maskFilename = os.path.join(info.outdir, 'shadowMask.rdr')
runTopo(info,demImage,dop=doppler,nativedop=inps.nativedop, legendre=inps.legendre)
runSimamp(os.path.dirname(info.heightFilename),os.path.basename(info.heightFilename))
# write multilooked geometry files in "geom_master" directory, same level as "Igrams"
if inps.rlks * inps.rlks > 1:
out_dir = os.path.join(os.path.dirname(os.path.dirname(info.outdir)), 'geom_master')
runMultilookGdal(in_dir=info.outdir, out_dir=out_dir, alks=inps.alks, rlks=inps.rlks)
#runMultilook(in_dir=info.outdir, out_dir=out_dir, alks=inps.alks, rlks=inps.rlks)
return
if __name__ == '__main__':
'''
Main driver.
'''
main()

View File

@ -73,8 +73,7 @@ def makeOnePlot(filename, pos):
minx = np.clip(np.min(pos[:,2])-win, 0, npix-1)
maxx = np.clip(np.max(pos[:,2])+win, 0, npix-1)
box = np.power(np.abs(data[miny:maxy, minx:maxx]), 0.4)
box = np.power(np.abs(data[int(miny):int(maxy), int(minx):int(maxx)]), 0.4)
plt.figure('CR analysis')
@ -104,7 +103,7 @@ def getAzRg(frame,llh):
pol._normRange = frame.instrument.rangePixelSize
pol.initPoly(azimuthOrder=0, rangeOrder=len(coeffs)-1, coeffs=[coeffs])
taz, rgm = frame.orbit.geo2rdr(list(llh), side=frame.instrument.platform.pointingDirection,
taz, rgm = frame.orbit.geo2rdr(list(llh)[1:], side=frame.instrument.platform.pointingDirection,
doppler=pol, wvl=frame.instrument.getRadarWavelength())
line = (taz - frame.sensingStart).total_seconds() * frame.PRF
@ -145,7 +144,7 @@ if __name__ == '__main__':
# frame.startingRange = frame.startingRange + 100.0
###Load CRS positions
llhs = np.loadtxt(inps.posfile)
llhs = np.loadtxt(inps.posfile, delimiter=',')
crs = []

View File

@ -1,38 +1,80 @@
## Sentinel-1 TOPS stack processor
The detailed algorithm for stack processing of TOPS data can be find here:
H. Fattahi, P. Agram, and M. Simons, “A network-based enhanced spectral diversity approach for TOPS time-series analysis,” IEEE Trans. Geosci. Remote Sens., vol. 55, no. 2, pp. 777786, Feb. 2017. (https://ieeexplore.ieee.org/abstract/document/7637021/)
+ Fattahi, H., P. Agram, and M. Simons (2016), A Network-Based Enhanced Spectral Diversity Approach for TOPS Time-Series Analysis, IEEE Transactions on Geoscience and Remote Sensing, 55(2), 777-786, doi:[10.1109/TGRS.2016.2614925](https://ieeexplore.ieee.org/abstract/document/7637021).
-----------------------------------
<<<<<< Sentinel-1 TOPS stack processor >>>>>>
To use the sentinel stack processor, make sure to add the path of your "contrib/stack/topsStack" folder to your $PATH environment varibale.
To use the sentinel stack processor, make sure to add the path of your `contrib/stack/topsStack` folder to your `$PATH` environment variable.
The scripts provides support for Sentinel-1 TOPS stack processing. Currently supported workflows include a coregistered stack of SLC, interferograms, offsets, and coherence.
stackSentinel.py generates all configuration and run files required to be executed on a stack of Sentinel-1 TOPS data. When stackSentinel.py is executed for a given workflow (-W option) a “configs” and “run_files” folder is generated. No processing is performed at this stage. Within the run_files folder different run_#_description files are contained which are to be executed as shell scripts in the run number order. Each of these run scripts call specific configure files contained in the “configs” folder which call ISCE in a modular fashion. The configure and run files will change depending on the selected workflow. To make run_# files executable, change the file permission accordingly (e.g., chmod +x run_1_unpack_slc).
`stackSentinel.py` generates all configuration and run files required to be executed on a stack of Sentinel-1 TOPS data. When stackSentinel.py is executed for a given workflow (-W option) a **configs** and **run_files** folder is generated. No processing is performed at this stage. Within the run_files folder different run\_#\_description files are contained which are to be executed as shell scripts in the run number order. Each of these run scripts call specific configure files contained in the “configs” folder which call ISCE in a modular fashion. The configure and run files will change depending on the selected workflow. To make run_# files executable, change the file permission accordingly (e.g., `chmod +x run_1_unpack_slc`).
To see workflow examples, type “stackSentinel.py -H”
To get an overview of all the configurable parameters, type “stackSentinel.py -h”
```bash
stackSentinel.py -H #To see workflow examples,
stackSentinel.py -h #To get an overview of all the configurable parameters
```
Required parameters of stackSentinel.py include:
-s SLC_DIRNAME A folder with downloaded Sentinel-1 SLCs.
-o ORBIT_DIRNAME A folder containing the Sentinel-1 orbits.
Missing orbit files will be downloaded automatically
-a AUX_DIRNAME A folder containing the Sentinel-1 Auxiliary files
-d DEM A DEM (Digital Elevation Model) referenced to wgs84
```cfg
-s SLC_DIRNAME #A folder with downloaded Sentinel-1 SLCs.
-o ORBIT_DIRNAME #A folder containing the Sentinel-1 orbits. Missing orbit files will be downloaded automatically
-a AUX_DIRNAME #A folder containing the Sentinel-1 Auxiliary files
-d DEM_FILENAME #A DEM (Digital Elevation Model) referenced to wgs84
```
In the following, different workflow examples are provided. Note that stackSentinel.py only generates the run and configure files. To perform the actual processing, the user will need to execute each run file in their numbered order.
In all workflows, coregistration (-C option) can be done using only geometry (set option = geometry) or with geometry plus refined azimuth offsets through NESD (set option = NESD) approach, the latter being the default. For the NESD coregistrstion the user can control the ESD coherence threshold (-e option) and the number of overlap interferograms (-O) to be used in NESD estimation.
------------------------------ Example 1: Coregistered stack of SLC ----------------------------
Generate the run and configure files needed to generate a coregistered stack of SLCs.
In this example, a pre-defined bounding box is specified. Note, if the bounding box is not provided it is set by default to the common SLC area among all SLCs. We recommend that user always set the processing bounding box. Since ESA does not have a fixed frame definition, we suggest to download data for a larger bounding box compared to the actual bounding box used in stackSentinel.py. This way user can ensure to have required data to cover the region of interest. Here is an example command to create configuration files for a stack of SLCs:
#### AUX_CAL file download ####
The following calibration auxliary (AUX_CAL) file is used for **antenna pattern correction** to compensate the range phase offset of SAFE products with **IPF verison 002.36** (mainly for images acquired before March 2015). If all your SAFE products are from another IPF version, then no AUX files are needed. Check [ESA document](https://earth.esa.int/documents/247904/1653440/Sentinel-1-IPF_EAP_Phase_correction) for details.
Run the command below to download the AUX_CAL file once and store it somewhere (_i.e._ ~/aux/aux_cal) so that you can use it all the time, for `stackSentinel.py -a` or `auxiliary data directory` in `topsApp.py`.
```
wget https://qc.sentinel1.eo.esa.int/product/S1A/AUX_CAL/20140908T000000/S1A_AUX_CAL_V20140908T000000_G20190626T100201.SAFE.TGZ
tar zxvf S1A_AUX_CAL_V20140908T000000_G20190626T100201.SAFE.TGZ
rm S1A_AUX_CAL_V20140908T000000_G20190626T100201.SAFE.TGZ
```
#### 1. Create your project folder somewhere ####
```
mkdir MexicoSenAT72
cd MexicoSenAT72
```
#### 2. Prepare DEM ####
Download the DEM (the wgs84 version is required) using the ISCE DEM download script.
```
mkdir DEM; cd DEM
dem.py -a stitch -b 18 20 -100 -97 -r -s 1 c
rm demLat*.dem demLat*.dem.xml demLat*.dem.vrt
fixImageXml.py -f -i demLat*.dem.wgs84 #Updating DEMs wgs84 xml to include full path to the DEM
cd ..
```
#### 3. Download Sentinel-1 data to SLC ####
#### 4.1 Example workflow: Coregistered stack of SLC ####
Generate the run and configure files needed to generate a coregistered stack of SLCs. In this example, a pre-defined bounding box is specified. Note, if the bounding box is not provided it is set by default to the common SLC area among all SLCs. We recommend that user always set the processing bounding box. Since ESA does not have a fixed frame definition, we suggest to download data for a larger bounding box compared to the actual bounding box used in stackSentinel.py. This way user can ensure to have required data to cover the region of interest. Here is an example command to create configuration files for a stack of SLCs:
```
stackSentinel.py -s ../SLC/ -d ../DEM/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -a ../../AuxDir/ -o ../../Orbits -b '19 20 -99.5 -98.5' -W slc
```
by running the command above, the configs and run_files folders are created. User needs to execute each run file in order. The order is specified by the index number of the run file name. For the example above, the run_files folder includes the following files:
- run_1_unpack_slc_topo_master
- run_2_average_baseline
- run_3_extract_burst_overlaps
The generated run files are self descriptive. Below is a short explanation on what each run_file does:
**run_1_unpack_slc_topo_master:**
Includes commands to unpack Sentinel-1 TOPS SLCs using ISCE readers. For older SLCs which need antenna elevation pattern correction, the file is extracted and written to disk. For newer version of SLCs which dont need the elevation antenna pattern correction, only a gdal virtual “vrt” file (and isce xml file) is generated. The “.vrt” file points to the Sentinel SLC file and reads them whenever required during the processing. If a user wants to write the “.vrt” SLC file to disk, it can be done easily using gdal_translate (e.g. gdal_translate of ENVI File.vrt File.slc).
The “run_1_unpack_slc_topo_master” also includes a command that refers to the config file of the stack master, which includes configuration for running topo for the stack master. Note that in the pair-wise processing strategy one should run topo (mapping from range-Doppler to geo coordinate) for all pairs. However, with stackSentinel, topo needs to be run only one time for the master in the stack.
**run_2_average_baseline:**
Computes average baseline for the stack. These baselines are not used for processing anywhere. They are only an approximation and can be used for plotting purposes. A more precise baseline grid is estimated later in run_10.
**run_3_extract_burst_overlaps:**
Burst overlaps are extracted for estimating azimuth misregistration using NESD technique. If coregistration method is chosen to be “geometry”, then this run file wont exist and the overlaps are not extracted.
**run_4_overlap_geo2rdr_resample:**
Running geo2rdr to estimate geometrical offsets between slave burst overlaps and the stack master burst overlaps. The slave burst overlaps are then resampled to the stack master burst overlaps.
**run_5_pairs_misreg:**
Using the coregistered stack burst overlaps generated from the previous step, differential overlap interferograms are generated and are used for estimating azimuth misregistration using Enhanced Spectral Diversity (ESD) technique.
**run_6_timeseries_misreg:**
A time-series of azimuth and range misregistration is estimated with respect to the stack master. The time-series is a least squares estimation from the pair misregistration from the previous step.
**run_7_geo2rdr_resample:**
Using orbit and DEM, geometrical offsets among all slave SLCs and the stack master are computed. The geometrical offsets, together with the misregistration time-series (from previous step), are used for precise coregistration of each burst SLC.
**run_8_extract_stack_valid_region:**
The valid region between burst SLCs at the overlap area of the bursts slightly changes for different acquisitions. Therefore we need to keep track of these overlaps, which will be used during merging bursts. Without this knowledge, lines of invalid data may appear in the merged products at the burst overlaps.
**run_9_merge:**
Merges all bursts for the master and coregistered SLCs. The geometry files are also merged, including longitude, latitude, shadow and layover mask, line-of-sight files, etc.
**run_10_grid_baseline:**
A coarse grid of baselines between each slave SLC and the stack master is generated. This is not used in any computation.
#### 4.2 Example workflow: Coregistered stack of SLC with modified parameters ####
In the following example, the same stack generation is requested, but the threshold of the default coregistration approach (NESD) is relaxed from its default value of 0.85 to 0.7.
```
stackSentinel.py -s ../SLC/ -d ../DEM/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -a ../../AuxDir/ -o ../../Orbits -b '19 20 -99.5 -98.5' -W slc -e 0.7
```
When running all the run files, the final products are located in the merge folder which has subdirectories **geom_master**, **baselines** and **SLC**. The **geom_master** folder contains geometry products such as longitude, latitude, height, local incidence angle, look angle, heading, and shadowing/layover mask files. The **baselines** folder contains sparse grids of the perpendicular baseline for each acquisition, while the **SLC** folder contains the coregistered SLCs.
#### 4.3 Example workflow: Stack of interferograms ####
Generate the run and configure files needed to generate a stack of interferograms.
In this example, a stack of interferograms is requested for which up to 2 nearest neighbor connections are included.
```
stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -c 2
```
In the following example, all possible interferograms are being generated and in which the coregistration approach is set to use geometry and not the default NESD.
```
stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -C geometry -c all
```
When executing all the run files, a coregistered stack of slcs are produced, the burst interferograms are generated and then merged. Merged interferograms are multilooked, filtered and unwrapped. Geocoding is not applied. If users need to geocode any product, they can use the geocodeGdal.py script.
#### 4.4 Example workflow: Stack of correlation ####
Generate the run and configure files needed to generate a stack of coherence.
In this example, a correlation stack is requested considering all possible coherence pairs and where the coregistration approach is done using geometry only.
```
stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem.wgs84 -b '19 20 -99.5 -98.5' -a ../../AuxDir/ -o ../../Orbits -C geometry -c all -W correlation
```
This workflow is basically similar to the previous one. The difference is that the interferograms are not unwrapped.
----------------------------------- DEM download example -----------------------------------
Download of DEM (need to use wgs84 version) using the ISCE DEM download script.
dem.py -a stitch -b 18 20 -100 -97 -r -s 1 -c
Updating DEMs wgs84 xml to include full path to the DEM
fixImageXml.py -f -i demLat_N18_N20_Lon_W100_W097.dem.wgs84
#### 5. Execute the commands in run files (run_1*, run_2*, etc) in the "run_files" folder ####