Merge branch 'main' into gpu
commit a44a894c69

@@ -2,43 +2,29 @@ version: 2.1
 jobs:
   test-cmake:
     docker:
-      - image: hysds/pge-base:latest
-        user: root
+      - image: ubuntu:20.04
     steps:
      - checkout:
          path: /root/project/src

-      - run:
-          name: Install development tools
-          command: |
-            set -ex
-            pwd
-            yum update -y
-            yum groupinstall -y "development tools"
-
      - run:
          name: Install ISCE requirements
          command: |
            set -ex
            pwd
-            mkdir config build install
-            . /opt/conda/bin/activate root
-            conda install -y cython gdal h5py libgdal pytest numpy fftw scipy scons hdf4 hdf5 libgcc libstdcxx-ng cmake astropy pybind11
-            yum install -y x11-devel motif-devel jq gcc-gfortran opencv opencv-devel opencv-python
+            export DEBIAN_FRONTEND=noninteractive
+            apt-get update
+            apt-get install -y cmake cython3 git libfftw3-dev libgdal-dev libhdf4-alt-dev libhdf5-dev libopencv-dev python3-gdal python3-h5py python3-numpy python3-scipy

      - run:
          name: Build and Install ISCE
          command: |
            set -ex
            cd /root/project/src
-            . /opt/conda/bin/activate root
            mkdir build
            cd build
-            INSTALLPATH=/opt/conda
-            MODPATH=$(python3 -c "import site; print(site.getsitepackages()[0])")
-            # convert to relative path
-            MODPATH=$(realpath --relative-to=$INSTALLPATH $MODPATH)
-            cmake .. -DCMAKE_INSTALL_PREFIX=$INSTALLPATH -DPYTHON_MODULE_DIR=$MODPATH
+            MODPATH=$(python3 -c "import site; print(site.getsitepackages()[-1])")
+            cmake .. -DCMAKE_INSTALL_PREFIX=install -DPYTHON_MODULE_DIR=$MODPATH
            make install VERBOSE=y

      - run:

@@ -46,7 +32,6 @@ jobs:
          command: |
            set -ex
            cd /root/project/src/build
-            . /opt/conda/bin/activate root
            ctest --output-on-failure
            ISCE2DIR=$(python3 -c "import os, isce2; print(os.path.dirname(isce2.__file__))" | tail -n 1)
            export PATH=$ISCE2DIR/applications:$PATH
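For reference, the conda-era build step removed above computed the Python module directory as a path relative to the install prefix before handing it to `cmake -DPYTHON_MODULE_DIR=...`. A minimal Python sketch of that computation; the prefix value here is illustrative, not taken from the diff:

```python
import os
import site

# Stand-in for INSTALLPATH=/opt/conda in the old CI job.
install_prefix = "/opt/conda"

# site.getsitepackages()[0] is what the old job queried for MODPATH.
module_dir = site.getsitepackages()[0]

# Equivalent of `realpath --relative-to=$INSTALLPATH $MODPATH`: express the
# module directory relative to the prefix so it can be re-joined onto any
# install prefix later.
rel_module_dir = os.path.relpath(module_dir, install_prefix)
print(rel_module_dir)  # e.g. lib/python3.8/site-packages
```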
@@ -60,37 +45,20 @@ jobs:

   test:
     docker:
-      - image: hysds/pge-base:latest
-        user: root
+      - image: ubuntu:20.04
     steps:
      - checkout:
          path: /root/project/src

-      - run:
-          name: Install development tools
-          command: |
-            set -ex
-            pwd
-            yum update -y
-            yum groupinstall -y "development tools"
-
      - run:
          name: Install ISCE requirements
          command: |
            set -ex
            pwd
            mkdir config build install
-            . /opt/conda/bin/activate root
-            conda install --yes cython gdal h5py libgdal pytest numpy fftw scipy scons hdf4 hdf5 libgcc libstdcxx-ng cmake astropy pybind11
-            yum install -y uuid-devel x11-devel motif-devel jq gcc-gfortran opencv opencv-devel opencv-python
-            ln -s /opt/conda/bin/cython /opt/conda/bin/cython3
-            cd /opt/conda/lib
-            unlink libuuid.so
-            unlink libuuid.so.1
-            ln -s /lib64/libuuid.so.1.3.0 libuuid.so
-            ln -s /lib64/libuuid.so.1.3.0 libuuid.so.1
-            cd /lib64
-            test -f libgfortran.so || ln -sv libgfortran.so.*.* libgfortran.so
+            export DEBIAN_FRONTEND=noninteractive
+            apt-get update
+            apt-get install -y scons cython3 git libfftw3-dev libgdal-dev libhdf4-alt-dev libhdf5-dev libmotif-dev libopencv-dev libx11-dev python3-gdal python3-h5py python3-numpy python3-scipy

      - run:
          name: Build SConfigISCE and setup dirs

@@ -100,10 +68,10 @@ jobs:
            cd config
            echo "PRJ_SCONS_BUILD = /root/project/build" > SConfigISCE
            echo "PRJ_SCONS_INSTALL = /root/project/install/isce" >> SConfigISCE
-            echo "LIBPATH = /usr/lib64 /usr/lib /opt/conda/lib" >> SConfigISCE
-            python_inc="$(echo /opt/conda/include/python3.* /opt/conda/lib/python3.*/site-packages/numpy/core/include)"
-            echo "CPPPATH = $python_inc /opt/conda/include /usr/include" >> SConfigISCE
-            echo "FORTRANPATH = /usr/include /opt/conda/include" >> SConfigISCE
+            echo "LIBPATH = /usr/lib64 /usr/lib /usr/lib/x86_64-linux-gnu" >> SConfigISCE
+            python_inc="/usr/include/python3.8 /usr/lib/python3/dist-packages/numpy/core/include"
+            echo "CPPPATH = $python_inc /usr/include /usr/include/gdal /usr/include/opencv4" >> SConfigISCE
+            echo "FORTRANPATH = /usr/include" >> SConfigISCE
            echo "FORTRAN = /bin/gfortran" >> SConfigISCE
            echo "CC = /bin/gcc" >> SConfigISCE
            echo "CXX = /bin/g++" >> SConfigISCE

@@ -111,7 +79,7 @@ jobs:
            echo "X11LIBPATH = /usr/lib64" >> SConfigISCE
            echo "MOTIFINCPATH = /usr/include" >> SConfigISCE
            echo "X11INCPATH = /usr/include" >> SConfigISCE
-            echo "RPATH = /opt/conda/lib /usr/lib64 /usr/lib" >> SConfigISCE
+            echo "RPATH = /usr/lib64 /usr/lib" >> SConfigISCE
            cat SConfigISCE

      - run:

@@ -119,10 +87,7 @@ jobs:
          command: |
            set -ex
            pwd
-            . /opt/conda/bin/activate root
            cd src
-            export PATH="/opt/conda/bin:$PATH"
-            export LD_LIBRARY_PATH="/opt/conda/lib:$LD_LIBRARY_PATH"
            SCONS_CONFIG_DIR=/root/project/config scons install --skipcheck

      - run:

@@ -130,11 +95,9 @@ jobs:
          command: |
            set -ex
            pwd
-            . /opt/conda/bin/activate root
            ISCE_HOME=/root/project/install/isce
-            export PATH="$ISCE_HOME/bin:$ISCE_HOME/applications:/opt/conda/bin:$PATH"
+            export PATH="$ISCE_HOME/bin:$ISCE_HOME/applications:$PATH"
            export PYTHONPATH="/root/project/install:$PYTHONPATH"
-            export LD_LIBRARY_PATH="/opt/conda/lib:$LD_LIBRARY_PATH"
            topsApp.py --help --steps
            stripmapApp.py --help --steps
            python3 -c "import isce"

@@ -41,12 +41,17 @@ if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_CURRENT_LIST_DIR}")
 endif()

 if(NOT DEFINED PYTHON_MODULE_DIR)
-    set(PYTHON_MODULE_DIR packages CACHE PATH
-        "Python module directory (relative to install prefix)")
+    set(PYTHON_MODULE_DIR packages CACHE PATH "Python module directory")
 endif()
 if(NOT DEFINED ISCE2_PKG)
     set(ISCE2_PKG ${PYTHON_MODULE_DIR}/isce2 CACHE PATH
-        "ISCE 2 python package install dir (relative to install prefix)")
+        "ISCE 2 python package install dir")
 endif()

+if(IS_ABSOLUTE "${ISCE2_PKG}")
+    set(ISCE2_PKG_FULL "${ISCE2_PKG}")
+else()
+    set(ISCE2_PKG_FULL "${CMAKE_INSTALL_PREFIX}/${ISCE2_PKG}")
+endif()
+
 include(isce2_buildflags)

@@ -95,3 +100,9 @@ else()
 endif()
 install(CODE "execute_process(COMMAND
     ${CMAKE_COMMAND} -E create_symlink ${symsrc} ${symdest})")
+
+# Enable native packaging using CPack
+if(NOT CPACK_PACKAGE_CONTACT)
+    set(CPACK_PACKAGE_CONTACT "Ryan Burns <rtburns@jpl.nasa.gov>")
+endif()
+include(CPack)

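The new `IS_ABSOLUTE` branch lets `ISCE2_PKG` be either an absolute path or one relative to the install prefix. A hedged Python sketch of the same resolution rule, with illustrative values (not taken from the diff):

```python
import os

def resolve_pkg_dir(pkg_dir: str, install_prefix: str) -> str:
    """Mirror the CMake logic: an absolute ISCE2_PKG is used as-is,
    a relative one is joined onto CMAKE_INSTALL_PREFIX."""
    if os.path.isabs(pkg_dir):
        return pkg_dir
    return os.path.join(install_prefix, pkg_dir)

print(resolve_pkg_dir("packages/isce2", "/usr/local"))  # /usr/local/packages/isce2
print(resolve_pkg_dir("/opt/isce2/pkg", "/usr/local"))  # /opt/isce2/pkg
```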
@@ -32,11 +32,12 @@ install(PROGRAMS ${files}

 # Symlink apps into PREFIX/bin so they are on the $PATH
 install(CODE "execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory \
-                              ${CMAKE_INSTALL_PREFIX}/bin)"
+                              ${CMAKE_INSTALL_FULL_BINDIR})"
        )

 foreach(file ${files})
     install(CODE "execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink \
-                                  ${CMAKE_INSTALL_PREFIX}/${ISCE2_PKG}/applications/${file} \
-                                  ${CMAKE_INSTALL_PREFIX}/bin/${file})"
+                                  ${ISCE2_PKG_FULL}/applications/${file} \
+                                  ${CMAKE_INSTALL_FULL_BINDIR}/${file})"
            )
 endforeach()

@@ -222,8 +222,8 @@ def readOffset(filename):
     offsets = OffsetField()
     for linex in lines:
         #linexl = re.split('\s+', linex)
-        #detect blank lines with only spaces and tabs
-        if linex.strip() == '':
+        #detect blank lines with only spaces and tabs, lines with invalid numbers
+        if (linex.strip() == '') or ('*' in linex):
            continue

        linexl = linex.split()

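The tightened test above skips not only blank lines but any line containing `*`, which Fortran-formatted output emits when a value overflows its field width ("lines with invalid numbers"). A small self-contained demonstration of the same filter; the sample records are made up:

```python
lines = [
    "  10  20   0.5  0.1",   # a normal offset record
    "    ",                  # blank: skipped before and after the change
    "  30  40  ****  0.2",   # overflowed field: only skipped after the change
]

kept = [linex for linex in lines
        if not (linex.strip() == '' or '*' in linex)]
print(kept)  # only the first record survives
```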
@@ -237,7 +237,8 @@ class Track(object):
             indx = self.findOverlapLine(sortedList[i-1][1],sortedList[i][1],endLine,width,i-1,i)
             #if indx is not None than indx is the new start line
             #otherwise we use startLine computed from acquisition time
-            if (indx is not None) and (indx + sortedList[i-1][0] != sortedList[i][0]):
+            #no need to do this for ALOS; otherwise there will be problems when there are multiple prfs and the data are interpolated. C. Liang, 20-dec-2021
+            if (self._frames[0].instrument.platform._mission != 'ALOS') and (indx is not None) and (indx + sortedList[i-1][0] != sortedList[i][0]):
                 startLine.append(indx + sortedList[i-1][0])
                 outputs.append(sortedList[i][1])
                 self.logger.info("Changing starting line for frame %d from %d to %d"%(i,endLine,indx))

@@ -185,8 +185,10 @@ class ALOS(Sensor):

         productLevel = float(self.leaderFile.sceneHeaderRecord.metadata[
             'Product level code'])
-        if productLevel == 1.0:
-            self.updateRawParameters()
+
+        #this proves to be not accurate. Adjusting first frame is OK, but adjusting following frames would cause discontinuities between frames. C. Liang, 20-dec-2021
+        # if productLevel == 1.0:
+        #     self.updateRawParameters()
+        pass

     def _populatePlatform(self):

@@ -146,14 +146,11 @@ class UAVSAR_Stack(Component):
         frame.setSensingStart(tStart)
         frame.setSensingStop(tStop)
         frame.setSensingMid(tMid)
-        frame.setNumberOfLines(
-            int(self.metadata['slc_1_1x1_mag.set_rows']))
-        frame.setNumberOfSamples(
-            int(self.metadata['slc_1_1x1_mag.set_cols']))
+        frame.setNumberOfLines(int(self.metadata['slc_{}_1x1_mag.set_rows'.format(self.segment_index)]))
+        frame.setNumberOfSamples(int(self.metadata['slc_{}_1x1_mag.set_cols'.format(self.segment_index)]))
         frame.setPolarization(self.metadata['Polarization'])
-        frame.C0 = self.metadata['slc_1_1x1_mag.col_addr']
-        frame.S0 = self.metadata[
-            'Segment {} Data Starting Azimuth'.format(self.segment_index)]
+        frame.C0 = self.metadata['slc_{}_1x1_mag.col_addr'.format(self.segment_index)]
+        frame.S0 = self.metadata['Segment {} Data Starting Azimuth'.format(self.segment_index)]
         frame.nearLookAngle = self.metadata['Minimum Look Angle']
         frame.farLookAngle = self.metadata['Maximum Look Angle']
         frame.setStartingRange(self.startingRange)

@@ -175,8 +172,7 @@ class UAVSAR_Stack(Component):
         #of values in degrees at each corner (without rdf unit specified)
         llC = []
         for ic in range(1,5):
-            key = 'Segment {0} Data Approximate Corner {1}'.format(
-                self.segment_index, ic)
+            key = 'Segment {0} Data Approximate Corner {1}'.format(self.segment_index, ic)
             self.logger.info("key = {}".format(key))
             self.logger.info("metadata[key] = {}".format(self.metadata[key], type(self.metadata[key])))
             llC.append(list(map(float, self.metadata[key].split(','))))

@@ -316,9 +312,9 @@ class UAVSAR_Stack(Component):
         drho = instrument.getRangePixelSize() #full res value, not spacing in the dop file
         prf = instrument.getPulseRepetitionFrequency()
         self.logger.info("extractDoppler: rho0, drho, prf = {}, {}, {}".format(rho0, drho, prf))
-        dopfile = self.metadata['dop']
-        f = open(dopfile,'r')
-        x = f.readlines() #first line is a header
+        dopfile = getattr(self, 'dopplerFile', self.metadata['dop'])
+        with open(dopfile,'r') as f:
+            x = f.readlines() #first line is a header

        import numpy
        z = numpy.array(

@@ -337,21 +333,20 @@ class UAVSAR_Stack(Component):
         res2 = fit[1][0] #sum of squared residuals
         self.logger.info("coeffs = {}".format(coefs))
         self.logger.info("rms residual = {}".format(numpy.sqrt(res2/len(dop))))
-        o = open("dop.txt", 'w')
-        for i, d in zip(rhoi, dop):
-            val = polyval(coefs,i)
-            res = d-val
-            o.write("{0} {1} {2} {3}\n".format(i, d, val, res))
-        dopfile = self.metadata['dop']
+        with open("dop.txt", 'w') as o:
+            for i, d in zip(rhoi, dop):
+                val = polyval(coefs,i)
+                res = d-val
+                o.write("{0} {1} {2} {3}\n".format(i, d, val, res))

        self.dopplerVals = {'Near':polyval(coefs, 0)} #need this temporarily in this module

        self.logger.info("UAVSAR_Stack.extractDoppler: self.dopplerVals = {}".format(self.dopplerVals))
        self.logger.info("UAVSAR_Stack.extractDoppler: prf = {}".format(prf))

-        #The doppler file values are in units rad/m. divide by 2*pi rad/cycle to convert
-        #to cycle/m. Then multiply by velocity to get Hz and divide by prf for dimensionless
-        #doppler coefficients
+        #The doppler file values are in units rad/m. divide by 2*pi rad/cycle to convert
+        #to cycle/m. Then multiply by velocity to get Hz and divide by prf for dimensionless
+        #doppler coefficients
        dop_scale = self.velocity/2.0/math.pi
        coefs = [x*dop_scale for x in coefs]
        #Set the coefs in frame._dopplerVsPixel because that is where DefaultDopp looks for them

@@ -361,14 +356,14 @@ class UAVSAR_Stack(Component):

     @property
     def terrainHeight(self):
-        #The peg point incorporates the actual terrainHeight
+        #The peg point incorporates the actual terrainHeight
         return 0.0

     @property
     def platformHeight(self):
         h = self.metadata['Global Average Altitude']
-        #Reduce the platform height by the terrain height because the
-        #peg radius of curvature includes the terrain height
+        #Reduce the platform height by the terrain height because the
+        #peg radius of curvature includes the terrain height
         h -= self.metadata['Global Average Terrain Height']
         return h

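The re-indented comment block above documents a unit chain for the Doppler polynomial: rad/m divided by 2π gives cycles/m, multiplying by platform velocity gives Hz, and dividing by the PRF gives dimensionless coefficients. A worked sketch with invented numbers; only `dop_scale` mirrors the code, the PRF step is what the comment describes rather than what happens at this point in the code:

```python
import math

coefs = [2.0e-3, -1.0e-9]  # illustrative Doppler coefficients in rad/m
velocity = 220.0           # illustrative platform velocity, m/s
prf = 1000.0               # illustrative pulse repetition frequency, Hz

# rad/m -> cycles/m -> Hz, i.e. dop_scale = velocity / 2.0 / pi as in the code
dop_scale = velocity / (2.0 * math.pi)
coefs_hz = [c * dop_scale for c in coefs]

# Hz -> dimensionless fraction of the PRF, the last step the comment describes
coefs_dimensionless = [c / prf for c in coefs_hz]
print(coefs_hz[0], coefs_dimensionless[0])  # ~0.07 Hz and ~7e-5
```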
@@ -409,7 +409,7 @@ char date[8];
   //line number of first line of this prf file
   line_number_first = (rspi_new.SC_clock_start[i] - rspi_pre[0].SC_clock_start[0]) * d2s / (1.0 / rspi_pre[0].prf[0]);
   //unit: pri of first prf of first image
-  num_lines_out = (rspi_pre[image_i].frame_counter_end[i] - rspi_pre[image_i].frame_counter_start[i] + 1) * (1.0/rspi_pre[image_i].prf[i]) / (1.0/rspi_pre[0].prf[0]);
+  num_lines_out = (int)((rspi_pre[image_i].frame_counter_end[i] - rspi_pre[image_i].frame_counter_start[i] + 1) * (1.0/rspi_pre[image_i].prf[i]) / (1.0/rspi_pre[0].prf[0]));

   if((fabs(roundfi(line_number_first)-line_number_first)<0.1) && (rspi_pre[image_i].prf[i]==rspi_pre[0].prf[0]))
     continue;

@@ -469,7 +469,7 @@ char date[8];
   //append prf i
   for(i = 1; i < rspi_new.nPRF; i++){
     //number of lines to be appended between frames if there are gaps
-    num_lines_append = (rspi_new.SC_clock_start[i] - rspi_new.SC_clock_start[0]) * d2s / (1.0/rspi_pre[0].prf[0]) - rspi_new.num_lines[0];
+    num_lines_append = roundfi((rspi_new.SC_clock_start[i] - rspi_new.SC_clock_start[0]) * d2s / (1.0/rspi_pre[0].prf[0])) - rspi_new.num_lines[0];
    if(num_lines_append >= 1){
      for(j = 0; j < num_lines_append; j++){
        for(k = 0; k < 2*rspi_new.num_bins[i]; k++)

@@ -485,7 +485,7 @@ char date[8];
        die("can't open", rspi_new.input_file[i]);
      num_lines_append = 0;
      for(j = 0; j < rspi_new.num_lines[i]; j++){
-        if((rspi_new.SC_clock_start[i] + j * (1.0/rspi_pre[0].prf[0]) / d2s - rspi_new.SC_clock_start[0]) * d2s / (1.0/rspi_pre[0].prf[0]) >= rspi_new.num_lines[0]){
+        if(roundfi((rspi_new.SC_clock_start[i] + j * (1.0/rspi_pre[0].prf[0]) / d2s - rspi_new.SC_clock_start[0]) * d2s / (1.0/rspi_pre[0].prf[0])) >= rspi_new.num_lines[0]){
          if(fread((char *)data, 2*sizeof(char)*rspi_new.num_bins[i], 1, next_prf_fp) != 1)
            die("can't read data from", rspi_new.input_file[i]);
          if(fwrite((char *)data, 2*sizeof(char)*rspi_new.num_bins[i], 1, first_prf_fp) != 1)

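All three changes above make the float-to-integer step explicit: `roundfi()` rounds line counts derived from clock times, and the `(int)` cast marks the one place where truncation is intended, instead of relying on implicit conversion. A two-line Python illustration of why that matters; the value is invented:

```python
# A line count computed from clock times is rarely an exact integer.
noisy = 99.9999997   # "100 lines" carrying floating-point error

print(int(noisy))    # 99  -- C-style truncation silently drops a line
print(round(noisy))  # 100 -- what roundfi() does in the changes above
```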
@@ -144,7 +144,8 @@ c    print *,val
      end

      integer*4 function unpackBytes(i1, i2, i3, i4)
-      integer*4 i1, i2, i3, i4
-      unpackBytes = iand(i1, 255)*256*256*256 + iand(i2, 255)*256*256 +
-     $   iand(i3, 255)*256 + iand(i4, 255)
+      integer*1 i1, i2, i3, i4, i255
+      i255 = 255 ! needed to keep gfortran >9 happy
+      unpackBytes = iand(i1, i255)*256*256*256 + iand(i2, i255)*256*256 +
+     $   iand(i3, i255)*256 + iand(i4, i255)
      end function

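`unpackBytes` assembles a 32-bit integer from four bytes by masking each argument to its low 8 bits and shifting it into place via multiplication by powers of 256. An equivalent Python sketch of the same bit manipulation:

```python
def unpack_bytes(i1: int, i2: int, i3: int, i4: int) -> int:
    # Mask each input to 8 bits (iand(i, 255)), then shift into place;
    # *256*256*256 is a 24-bit shift, and so on down to byte 0.
    return ((i1 & 0xFF) << 24) | ((i2 & 0xFF) << 16) | ((i3 & 0xFF) << 8) | (i4 & 0xFF)

assert unpack_bytes(0x12, 0x34, 0x56, 0x78) == 0x12345678
```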
@@ -64,6 +64,7 @@ def setup(self):
     #1: use mean value of a burst
     #2: use full burst
     ionParam.azshiftFlag = 1
+    ionParam.maskedAreas = None

     #better NOT try changing the following two parameters, since they are related
     #to the filtering parameters above

@@ -386,7 +387,14 @@ def subband(self, ionParam):
     from isceobj.Util.Poly2D import Poly2D
     from contrib.alos2proc.alos2proc import rg_filter

-    from isceobj.TopsProc.runFineResamp import resampSecondary
+    # decide whether to use CPU or GPU
+    hasGPU = self.useGPU and self._insar.hasGPU()
+    if hasGPU:
+        from isceobj.TopsProc.runFineResamp import resampSecondaryGPU as resampSecondary
+        print('Using GPU for fineresamp')
+    else:
+        from isceobj.TopsProc.runFineResamp import resampSecondaryCPU as resampSecondary
+
     from isceobj.TopsProc.runFineResamp import getRelativeShifts
     from isceobj.TopsProc.runFineResamp import adjustValidSampleLine
     from isceobj.TopsProc.runFineResamp import getValidLines

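The subband step now selects its resampler at run time and only takes the GPU path when it is both requested and actually available. A minimal self-contained sketch of the same dispatch pattern; the module and function names are placeholders, not the ISCE2 API:

```python
def _resamp_cpu(data):
    # stand-in for the CPU implementation
    return data

def get_resampler(use_gpu_requested: bool, gpu_available: bool):
    """Pick an implementation the way subband() does: GPU only when
    requested AND available, otherwise fall back to the CPU path."""
    if use_gpu_requested and gpu_available:
        try:
            from gpu_backend import resamp as impl  # hypothetical GPU module
            print('Using GPU for fineresamp')
            return impl
        except ImportError:
            pass  # GPU module missing: fall through to CPU
    return _resamp_cpu

resamp = get_resampler(use_gpu_requested=True, gpu_available=False)
```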
@@ -424,8 +432,8 @@ def subband(self, ionParam):
         ##############################################################
         #for resampling
         relShifts = getRelativeShifts(reference, secondary, minBurst, maxBurst, secondaryBurstStart)
-        print('Shifts IW-{0}: '.format(swath), relShifts)
+        print('Shifts IW-{0}: '.format(swath), relShifts)

         ####Can corporate known misregistration here
         apoly = Poly2D()
         apoly.initPoly(rangeOrder=0,azimuthOrder=0,coeffs=[[0.]])

@@ -445,8 +453,8 @@ def subband(self, ionParam):

     #only process common bursts
     for ii in range(minBurst, maxBurst):
-        jj = secondaryBurstStart + ii - minBurst
+        jj = secondaryBurstStart + ii - minBurst

         masBurst = reference.bursts[ii]
         slvBurst = secondary.bursts[jj]

@@ -469,10 +477,10 @@ def subband(self, ionParam):
             #removing window
             rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * burst.rangePixelSize)
             if burst.rangeWindowType == 'Hamming':
-                removeHammingWindow(burst.image.filename, tmpFilename, burst.rangeProcessingBandwidth, rangeSamplingRate, burst.rangeWindowCoefficient, virtual=virtual)
+                removeHammingWindow(burst.image.filename, tmpFilename, burst.rangeProcessingBandwidth, rangeSamplingRate, burst.rangeWindowCoefficient, virtual=virtual)
             else:
                 raise Exception('Range weight window type: {} is not supported yet!'.format(burst.rangeWindowType))

            #subband
            rg_filter(tmpFilename,
                #burst.numberOfSamples,

@@ -550,7 +558,7 @@ def subband(self, ionParam):
                                minAz=minAz, maxAz=maxAz,
                                minRng=minRg, maxRng=maxRg)
             slvBurstResamp2.image.filename = outimg.filename
-
+
             #forming interferogram
             referencename = masBurst2.image.filename
             secondaryname = slvBurstResamp2.image.filename

@@ -835,7 +843,7 @@ def snaphuUnwrap(self, xmlDirname, wrapName, corrfile, unwrapName, nrlks, nalks,
     ifg = self._insar.loadProduct( os.path.join(xmlDirname, 'IW{0}.xml'.format(swath)))
     wavelength = ifg.bursts[0].radarWavelength

-    ####tmid
+    ####tmid
     tstart = ifg.bursts[0].sensingStart
     tend = ifg.bursts[-1].sensingStop
     tmid = tstart + 0.5*(tend - tstart)

@@ -856,7 +864,7 @@ def snaphuUnwrap(self, xmlDirname, wrapName, corrfile, unwrapName, nrlks, nalks,
     azimuthLooks = nalks
     azfact = 0.8
     rngfact = 0.8
-    corrLooks = rangeLooks * azimuthLooks/(azfact*rngfact)
+    corrLooks = rangeLooks * azimuthLooks/(azfact*rngfact)
     maxComponents = 20

     snp = Snaphu()

@@ -1036,8 +1044,8 @@ def multilook_unw(self, ionParam, mergedDirname):
     lowerint = np.fromfile(lowerIntName, dtype=np.complex64).reshape(lengthNew, widthNew)
     upperint = np.fromfile(upperIntName, dtype=np.complex64).reshape(lengthNew, widthNew)
     cor = np.zeros((lengthNew*2, widthNew), dtype=np.float32)
-    cor[0:length*2:2, :] = np.sqrt( (np.absolute(lowerint)+np.absolute(upperint))/2.0 )
-    cor[1:length*2:2, :] = cal_coherence(lowerint*np.conjugate(upperint), win=3, edge=4)
+    cor[0:lengthNew*2:2, :] = np.sqrt( (np.absolute(lowerint)+np.absolute(upperint))/2.0 )
+    cor[1:lengthNew*2:2, :] = cal_coherence(lowerint*np.conjugate(upperint), win=3, edge=4)
     cor.astype(np.float32).tofile(corName)

@@ -1089,7 +1097,7 @@ def fit_surface(x, y, z, wgt, order):
     a = a1 * np.matlib.repmat(np.sqrt(wgt), 1, n)
     b = z * np.sqrt(wgt)
     c = np.linalg.lstsq(a, b, rcond=-1)[0]
-
+
     #type: <class 'numpy.ndarray'>
     return c

@@ -1247,7 +1255,7 @@ def computeIonosphere(lowerUnw, upperUnw, cor, fl, fu, adjFlag, corThresholdAdj,
     #fl = SPEED_OF_LIGHT / ionParam.radarWavelengthLower
     #fu = SPEED_OF_LIGHT / ionParam.radarWavelengthUpper
     f0 = (fl + fu) / 2.0
-
+
     #dispersive
     if dispersive == 0:
         ionos = fl * fu * (lowerUnw * fu - upperUnw * fl) / f0 / (fu**2 - fl**2)

@@ -1330,7 +1338,7 @@ def cal_cross_ab_ramp(swathList, width, numberRangeLooks, passDirection):
     numberRangeLooks: number of range looks in the processing of ionosphere estimation
     passDirection: descending/ascending
     '''
-
+
     #below is from processing chile_d156_160725(S1A)-160929(S1B)
     #empirical polynomial
     deg = 3

@@ -1359,7 +1367,7 @@ def cal_cross_ab_ramp(swathList, width, numberRangeLooks, passDirection):
     # here I just simply ignore this case
     offset = swath_offset[swathList[0]-1]
     x = offset / tnp + width / tnp * np.arange(width2) / (width2 - 1.0)
-
+
     #calculate ramp
     y_fit = x * 0.0
     for i in range(deg+1):

@@ -1373,7 +1381,7 @@ def ionSwathBySwath(self, ionParam):
     This routine merge, unwrap and compute ionosphere swath by swath, and then
     adjust phase difference between adjacent swaths caused by relative range timing
     error between adjacent swaths.
-
+
     This routine includes the following steps in the merged-swath processing:
     merge(self, ionParam)
     unwrap(self, ionParam)

@@ -1510,7 +1518,7 @@ def ionSwathBySwath(self, ionParam):
         #img.bands = 2
         #img.filename = corfile
         #img.renderHdr()
-
+
         #img = isceobj.Image.createUnwImage()
         img = isceobj.createOffsetImage()
         img.setFilename(corfile)

@@ -1603,7 +1611,7 @@ def ionSwathBySwath(self, ionParam):
         else:
             print('number of samples available for adjustment in the overlap area: {}'.format(index[0].size))
             #diff = np.mean((ionosList[1] - adjdata)[index], dtype=np.float64)
-
+
             #use weighted mean instead
             wgt = corList[1][index]**14
             diff = np.sum((ionosList[1] - adjdata)[index] * wgt / np.sum(wgt, dtype=np.float64), dtype=np.float64)

@@ -1694,7 +1702,7 @@ def computeDopplerOffset(burst, firstline, lastline, firstcolumn, lastcolumn, nr
     '''

     Vs = np.linalg.norm(burst.orbit.interpolateOrbit(burst.sensingMid, method='hermite').getVelocity())
-    Ks = 2 * Vs * burst.azimuthSteeringRate / burst.radarWavelength
+    Ks = 2 * Vs * burst.azimuthSteeringRate / burst.radarWavelength

     #firstcolumn, lastcolumn: index starts from 1
     rng = multilookIndex(firstcolumn-1, lastcolumn-1, nrlks) * burst.rangePixelSize + burst.startingRange

@@ -1712,7 +1720,7 @@ def computeDopplerOffset(burst, firstline, lastline, firstcolumn, lastcolumn, nr
     #center doppler frequency due to squint
     dopplerOffset2 = (f_etac[None,:] / Ka[None,:]) / (burst.azimuthTimeInterval * nalks)
     dopplerOffset = dopplerOffset1 + dopplerOffset2
-
+
     return (dopplerOffset, Ka)

@@ -1776,7 +1784,7 @@ def grd2ion(self, ionParam):
     offset = ratio * dopplerOffset

     #      0             1              2               3
-    #firstlineAdj, lastlineAdj, firstcolumnAdj, lastcolumnAdj,
+    #firstlineAdj, lastlineAdj, firstcolumnAdj, lastcolumnAdj,
     #after multiplication, index starts from 1
     firstline = np.int(np.around((burstValidBox[i][j][0] - 1) / ionParam.numberAzimuthLooks + 1))
     lastline = np.int(np.around(burstValidBox[i][j][1] / ionParam.numberAzimuthLooks))

@@ -1794,7 +1802,7 @@ def grd2ion(self, ionParam):
         index = index0 + offset[:, k]
         value = burstImage[:, k]
         f = interp1d(index, value, kind='cubic', fill_value="extrapolate")
-
+
         index_min = np.int(np.around(np.amin(index)))
         index_max = np.int(np.around(np.amax(index)))
         flag = index0 * 0.0

@@ -1879,7 +1887,7 @@ def adaptive_gaussian(ionos, wgt, size_max, size_min):
     std_mv = np.mean(std[np.nonzero(std!=0)], dtype=np.float64)
     diff_max = np.amax(np.absolute(std - std_mv)) + std_mv + 1
     std[np.nonzero(std==0)] = diff_max
-
+
     index = np.nonzero(np.ones((length, width))) + ((np.argmin(np.absolute(std - std_mv), axis=2)).reshape(length*width), )
     out = flt[index]
     out = out.reshape((length, width))

@@ -1961,7 +1969,7 @@ def filt_gaussian(self, ionParam):
         ion_fit *= 0

     ion -= ion_fit * (ion!=0)
-
+
     #minimize the effect of low coherence pixels
     #cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001
     #filt = adaptive_gaussian(ion, cor, size_max, size_min)

@@ -2175,7 +2183,7 @@ def ion2grd(self, ionParam):
             if azshiftFlag == 2:
                 f2 = interp1d(indexRange2, dion[i, :], kind='cubic', fill_value="extrapolate")
                 dionOneRangeLook[i, :] = f2(indexRange)
-
+
     #use the satellite height of the mid burst of first swath of reference acquistion
     swathList = self._insar.getValidSwathList(self.swaths)
     reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swathList[0])))

@@ -2296,7 +2304,7 @@ def ion2grd(self, ionParam):
 def multilook(data, nalks, nrlks):
     '''
     doing multiple looking
-
+
     ATTENTION:
     NO AVERAGING BY DIVIDING THE NUMBER OF TOTAL SAMPLES IS DONE.
     '''

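As the docstring stresses, this multilook sums samples in each block without dividing by the number of samples. A hedged numpy sketch of that behavior, under the assumption of a plain block-sum (array shapes invented; this illustrates the summing variant the docstring describes, not necessarily the exact implementation):

```python
import numpy as np

def multilook_sum(data, nalks, nrlks):
    # Sum (not average) each nalks x nrlks block, matching the
    # "NO AVERAGING ... IS DONE" note in the docstring above.
    length, width = data.shape
    length2, width2 = length // nalks, width // nrlks
    blocks = data[:length2 * nalks, :width2 * nrlks].reshape(
        length2, nalks, width2, nrlks)
    return blocks.sum(axis=(1, 3))

looked = multilook_sum(np.ones((100, 200)), nalks=10, nrlks=20)
print(looked[0, 0])  # 200.0: the block sum over 10*20 samples, not 1.0
```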
@@ -2383,7 +2391,7 @@ def esd(self, ionParam):

         if nBurst <= 1:
             continue
-
+
         ####Load relevant products
         reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath)))
         secondary = self._insar.loadProduct( os.path.join(self._insar.fineCoregDirname, 'IW{0}.xml'.format(swath)))

@@ -2392,21 +2400,21 @@ def esd(self, ionParam):
         for ii in range(minBurst, maxBurst):
             jj = ii - minBurst
             ####Process the top bursts
-            masBurst = reference.bursts[ii]
+            masBurst = reference.bursts[ii]
             slvBurst = secondary.bursts[jj]
             adjustValidLineSample(masBurst,slvBurst)
         overlapBox = get_overlap_box(reference, minBurst, maxBurst)

         #using esd to calculate mis-registration
         misreg = np.array([])
         totalSamples = 0
         for ii in range(minBurst+1, maxBurst):
             jj = ii - minBurst
             ####Process the top bursts
-            masBurstTop = reference.bursts[ii-1]
+            masBurstTop = reference.bursts[ii-1]
             slvBurstTop = secondary.bursts[jj-1]

-            masBurstCur = reference.bursts[ii]
+            masBurstCur = reference.bursts[ii]
             slvBurstCur = secondary.bursts[jj]

             #get info

@@ -2464,7 +2472,7 @@ def esd(self, ionParam):
         for ii in range(minBurst, maxBurst):
             jj = ii - minBurst
             ####Process the top bursts
-            masBurst = reference.bursts[ii]
+            masBurst = reference.bursts[ii]
             slvBurst = secondary.bursts[jj]

             ionname = os.path.join(ionParam.ionDirname, ionParam.ionBurstDirname, 'IW{0}'.format(swath), '%s_%02d.ion'%('burst',ii+1))

@@ -2504,7 +2512,7 @@ def esd_noion(self, ionParam):

         if nBurst <= 1:
             continue
-
+
         ####Load relevant products
         reference = self._insar.loadProduct( os.path.join(self._insar.referenceSlcProduct, 'IW{0}.xml'.format(swath)))
         secondary = self._insar.loadProduct( os.path.join(self._insar.fineCoregDirname, 'IW{0}.xml'.format(swath)))

@@ -2513,21 +2521,21 @@ def esd_noion(self, ionParam):
         for ii in range(minBurst, maxBurst):
             jj = ii - minBurst
             ####Process the top bursts
-            masBurst = reference.bursts[ii]
+            masBurst = reference.bursts[ii]
             slvBurst = secondary.bursts[jj]
             adjustValidLineSample(masBurst,slvBurst)
         overlapBox = get_overlap_box(reference, minBurst, maxBurst)

         #using esd to calculate mis-registration
         misreg = np.array([])
         totalSamples = 0
         for ii in range(minBurst+1, maxBurst):
             jj = ii - minBurst
             ####Process the top bursts
-            masBurstTop = reference.bursts[ii-1]
+            masBurstTop = reference.bursts[ii-1]
             slvBurstTop = secondary.bursts[jj-1]

-            masBurstCur = reference.bursts[ii]
+            masBurstCur = reference.bursts[ii]
             slvBurstCur = secondary.bursts[jj]

             #get info

@@ -2589,12 +2597,12 @@ def esd_noion(self, ionParam):
         for ii in range(minBurst, maxBurst):
             jj = ii - minBurst
             ####Process the top bursts
-            masBurst = reference.bursts[ii]
+            masBurst = reference.bursts[ii]
             slvBurst = secondary.bursts[jj]

             #ionname = os.path.join(ionParam.ionDirname, ionParam.ionBurstDirname, 'IW{0}'.format(swath), '%s_%02d.ion'%('burst',ii+1))
             #ion = np.fromfile(ionname, dtype=np.float32).reshape(masBurst.numberOfLines, masBurst.numberOfSamples)

             (dopplerOffset, Ka) = computeDopplerOffset(masBurst, 1, masBurst.numberOfLines, 1, masBurst.numberOfSamples, nrlks=1, nalks=1)
             centerFrequency = dopplerOffset * Ka[None,:] * (masBurst.azimuthTimeInterval)

@@ -9,8 +9,21 @@ target_link_libraries(geogrid PRIVATE
     GDAL::GDAL
     isce2::combinedLib
 )

+Python_add_library(geogridOptical MODULE
+    bindings/geogridOpticalmodule.cpp
+    src/geogridOptical.cpp
+)
+target_include_directories(geogridOptical PRIVATE
+    include
+)
+target_link_libraries(geogridOptical PRIVATE
+    GDAL::GDAL
+    isce2::combinedLib
+)
+
 InstallSameDir(
+    geogridOptical
     geogrid
     __init__.py
     Geogrid.py

@@ -17,6 +17,7 @@ c----------------------------------------------------------------
       INTEGER EGNR,AGNR,OGNR
       REAL LATI,LONGI
       COMMON/GENER/ UMR,ERA,AQUAD,BQUAD
+      character*80 PATH
 C
       CALL INITIZE
       ibbb=0

@@ -53,11 +54,12 @@ C 2005.00 04/25/05 CALL FELDI and DO 1111 I=1,7 (Alexey Petrov)
 C 2005.01 11/10/05 added igrf_dip and geodip (MLAT)
 C 2005.02 11/10/05 updated to IGRF-10 version
 C 2006.00 12/21/06 GH2(120) -> GH2(144)
+c 2022/01/23 added path to be consistent with igrf_bvector (Eric Fielding)
 C
 C*********************************************************************

       subroutine igrf_sub(xlat,xlong,year,height,
-     &                    xl,icode,dip,dec)
+     &                    xl,icode,dip,dec,path)
 c----------------------------------------------------------------
 c INPUT:
 c    xlat      geodatic latitude in degrees

@@ -65,6 +67,7 @@ c xlong geodatic longitude in degrees
 c    year      decimal year (year+month/12.0-0.5 or year+day-of-year/365
 c              or 366 if leap year)
 c    height    height in km
+c    path      the path to the data files
 c OUTPUT:
 c    xl        L value
 c    icode     =1  L is correct; =2  L is not correct;

@@ -76,6 +79,7 @@ c----------------------------------------------------------------
       INTEGER EGNR,AGNR,OGNR
       REAL LATI,LONGI
       COMMON/GENER/ UMR,ERA,AQUAD,BQUAD
+      character*80 PATH
 C
       CALL INITIZE
       ibbb=0

@@ -86,7 +90,7 @@ C
 c
 C----------------CALCULATE PROFILES-----------------------------------
 c
-      CALL FELDCOF(YEAR,DIMO)
+      CALL FELDCOF(YEAR,DIMO,PATH)
       CALL FELDG(LATI,LONGI,HEIGHT,BNORTH,BEAST,BDOWN,BABS)
       CALL SHELLG(LATI,LONGI,HEIGHT,DIMO,XL,ICODE,BAB1)
       DIP=ASIN(BDOWN/BABS)/UMR

@@ -1,14 +1,15 @@
 #!/usr/bin/env python3
+# modified to pass the segment number to UAVSAR_STACK sensor EJF 2020/08/02

+import os
+import glob
+import argparse
+import shelve
 import isce
 from isceobj.Sensor import createSensor
-import shelve
-import argparse
-import glob
 from isceobj.Util import Poly1D
 from isceobj.Planet.AstronomicalHandbook import Const
-import os


 def cmdLineParse():
     '''

@@ -16,46 +17,46 @@ def cmdLineParse():
     '''

     parser = argparse.ArgumentParser(description='Unpack UAVSAR SLC data and store metadata in pickle file.')
-    parser.add_argument('-i','--input', dest='h5dir', type=str,
-                        required=True, help='Input UAVSAR directory')
+    parser.add_argument('-i','--input', dest='metaFile', type=str,
+                        required=True, help='metadata file')
     parser.add_argument('-d','--dop_file', dest='dopFile', type=str,
                         default=None, help='Doppler file')
     parser.add_argument('-s','--segment', dest='stackSegment', type=int,
                         default=1, help='stack segment')
-    parser.add_argument('-o', '--output', dest='slcdir', type=str,
+    parser.add_argument('-o', '--output', dest='slcDir', type=str,
                         required=True, help='Output SLC directory')
     return parser.parse_args()


-def unpack(hdf5, slcname, dopFile, stackSegment, parse=False):
+def unpack(metaFile, slcDir, dopFile, stackSegment, parse=False):
     '''
-    Unpack HDF5 to binary SLC file.
+    Prepare shelve/pickle file for the binary SLC file.
     '''

     obj = createSensor('UAVSAR_STACK')
     obj.configure()
-    obj.metadataFile = hdf5
+    obj.metadataFile = metaFile
     obj.dopplerFile = dopFile
     obj.segment_index = stackSegment
     obj.parse()

-    if not os.path.isdir(slcname):
-        os.mkdir(slcname)
+    if not os.path.isdir(slcDir):
+        os.mkdir(slcDir)

-    pickName = os.path.join(slcname, 'data')
+    pickName = os.path.join(slcDir, 'data')
     with shelve.open(pickName) as db:
         db['frame'] = obj.frame


 if __name__ == '__main__':
     '''
     Main driver.
     '''

     inps = cmdLineParse()
-    if inps.slcdir.endswith('/'):
-        inps.slcdir = inps.slcdir[:-1]
+    inps.slcDir = inps.slcDir.rstrip('/')
+    inps.metaFile = os.path.abspath(inps.metaFile)
+    inps.dopFile = os.path.abspath(inps.dopFile)
+    inps.slcDir = os.path.abspath(inps.slcDir)

-    if inps.h5dir.endswith('/'):
-        inps.h5dir = inps.h5dir[:-1]
-
-    unpack(inps.h5dir, inps.slcdir, inps.dopFile, inps.stackSegment)
+    unpack(inps.metaFile, inps.slcDir, inps.dopFile, inps.stackSegment)

@@ -165,3 +165,124 @@ stackSentinel.py -s ../SLC/ -d ../../MexicoCity/demLat_N18_N20_Lon_W100_W097.dem
 This workflow is basically similar to the previous one. The difference is that the interferograms are not unwrapped.

 #### 5. Execute the commands in run files (run_01*, run_02*, etc) in the "run_files" folder ####
+
+
+-----------------------------------
+### Ionospheric Phase Estimation
+
+Ionospheric phase estimation can be performed for each of the workflows introduced above. Generally, we should do ionospheric phase estimation for data acquired at low latitudes on ascending tracks. However, ionospheric phase estimation only works well in areas with high coherence, since it requires phase unwrapping. Details about Sentinel-1 ionospheric correction can be found in
+
++ C. Liang, P. Agram, M. Simons, and E. J. Fielding, "Ionospheric correction of InSAR time series analysis of C-band Sentinel-1 TOPS data," IEEE Transactions on Geoscience and Remote Sensing, vol. 57, no. 9, pp. 6755-6773, Sep. 2019.
+
+Ionospheric phase estimation has more requirements than regular InSAR processing. The two most important are:
+
+- All the acquisitions need to be connected in the entire network.
+
+- The swath starting ranges need to be the same among all acquisitions; otherwise a phase offset between adjacent swaths needs to be estimated in order to maintain consistency among the swaths, which might not be accurate enough.
+
+#### 1. select the usable acquisitions ####
+
+In stack ionospheric phase estimation, acquisitions with the same swath starting ranges are put in a group. A network is formed within a group. Extra pairs are also processed to connect the different groups so that all acquisitions are connected. But we need to estimate a phase offset for these extra pairs, which might not be accurate. Therefore, for a particular set of swath starting ranges, if there are only a few acquisitions, it's better to just discard them so that we don't have to estimate the phase offsets.
+
+```
+s1_select_ion.py -dir data/slc -sn 33.550217/37.119545 -nr 10
+```
+
+Acquisitions to be used need to fully cover the south/north bounds. After running this command, acquisitions not to be used will be put in a folder named 'not_used'. It's OK to run this command multiple times.
+
+#### 2. generate configure and run files ####
+
+In stackSentinel.py, two options control ionospheric phase estimation:
+
+- --param_ion
+- --num_connections_ion
+
+An example --param_ion file 'ion_param.txt' is provided in the code directory. For example, if we want to do ionospheric phase estimation when processing a stack of interferograms:
+
+```
+stackSentinel.py -s ../data/slc -d ../data/dem/dem_1_arcsec/demLat_N32_N41_Lon_W113_W107.dem.wgs84 -b '33.550217 37.119545 -111.233932 -107.790451' -a ../data/s1_aux_cal -o ../data/orbit -C geometry -c 2 --param_ion ../code/ion_param.txt --num_connections_ion 3
+```
+
+If ionospheric phase estimation is enabled in stackSentinel.py, it will generate the following run files. Here ***ns*** means the number of steps in the original stack processing, which depends on the type of stack (slc, correlation, interferogram, and offset).
+
+- run_ns+1_subband_and_resamp
+- run_ns+2_generateIgram_ion
+- run_ns+3_mergeBurstsIon
+- run_ns+4_unwrap_ion
+- run_ns+5_look_ion
+- run_ns+6_computeIon
+- run_ns+7_filtIon
+- run_ns+8_invertIon
+
+Note about **'areas masked out in ionospheric phase estimation'** in ion_param.txt. Separated islands or areas usually lead to phase unwrapping errors and therefore significantly affect ionospheric phase estimation, so it's better to mask them out (an example entry is sketched below). Check ion/date1_date2/ion_cal/raw_no_projection.ion for areas to be masked out. However, we don't have this file before processing the data. To quickly get it, we can process a stack of just two acquisitions. NOTE that the reference of this two-acquisition stack should be the same as that of the full stack we want to process.
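Based on how the stack processor's `ionParamUsr` parses this key (brackets stripped, values comma-split, four integers per area), a plausible ion_param.txt entry looks like the following; the coordinates are invented, and the exact meaning of the four numbers (line/column bounds of each masked area) should be checked against the topsApp documentation:

```
areas masked out in ionospheric phase estimation: [[100, 200, 300, 400], [1000, 1200, 500, 600]]
```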
+**run_ns+1_subband_and_resamp**
+
+Two subband burst SLCs are generated for each burst. For secondary acquisitions, the subband burst SLCs are also resampled to match the reference burst SLCs. If the subband burst SLCs already exist, the program simply skips the burst.
+
+**run_ns+2_generateIgram_ion**
+
+Generate subband burst interferograms.
+
+**run_ns+3_mergeBurstsIon**
+
+Merge subband burst interferograms, and create a unique coherence file used in ionospheric phase estimation. This will be done swath by swath if the two acquisitions of the pair have different swath starting ranges.
+
+**run_ns+4_unwrap_ion**
+
+Unwrap merged subband interferograms. This will be done swath by swath if the two acquisitions of the pair have different swath starting ranges.
+
+**run_ns+5_look_ion**
+
+Take further looks on the unwrapped interferograms, and create the unique coherence file based on the further number of looks. This will be done swath by swath if the two acquisitions of the pair have different swath starting ranges.
+
+**run_ns+6_computeIon**
+
+Compute ionospheric phase. This will be done swath by swath if the two acquisitions of the pair have different swath starting ranges, and then the swath ionospheric phases will be merged.
+
+**run_ns+7_filtIon**
+
+Filter ionospheric phase.
+
+**run_ns+8_invertIon**
+
+Estimate ionospheric phase for each date. We highly recommend inspecting all pair ionospheric phases in ion/date1_date2/ion_cal/filt.ion, and excluding those with anomalies from this command.
+
+Typical anomalies include dense fringes caused by phase unwrapping errors, and a range ramp resulting from errors in estimating phase offsets for pairs with different swath starting ranges (check pairs_diff_starting_ranges.txt).
+
+#### 3. run command files generated ####
+
+Run the commands sequentially.
+
+#### 4. check results ####
+
+Results from ionospheric phase estimation:
+
+- reference and coreg_secondarys: now also contain subband burst SLCs
+- ion: original ionospheric phase estimation results
+- ion_dates: ionospheric phase for each acquisition
+- ion/date1_date2/ion_cal/filt.ion: filtered ionospheric phase
+- ion/date1_date2/ion_cal/raw_no_projection.ion: original ionospheric phase
+- ion/date1_date2/lower/merged/fine_look.unw: unwrapped lower band interferogram
+- ion/date1_date2/upper/merged/fine_look.unw: unwrapped upper band interferogram
+
+If ionospheric phase estimation is processed swath by swath because of different swath starting ranges, there will be swath processing directories, including
+
+- ion/date1_date2/ion_cal_IW*
+- ion/date1_date2/lower/merged_IW*
+- ion/date1_date2/upper/merged_IW*
+
+After processing, we can plot the ionospheric phase estimation results using plotIonPairs.py and plotIonDates.py. For example:
+
+```
+plotIonPairs.py -idir ion -odir ion_plot
+plotIonDates.py -idir ion_dates -odir ion_dates_plot
+```
+
+Relationships of the ionospheric phases:
+
+```
+ion_dates/date1.ion - ion_dates/date2.ion = ion/date1_date2/ion_cal/filt.ion
+ion_dates/date1.ion - ion_dates/date2.ion = ionospheric phase in merged/interferograms/date1_date2/filt_fine.unw
+```

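These identities can be spot-checked numerically. A hedged numpy sketch, assuming the .ion files are flat float32 rasters of identical shape; the width used here is a placeholder, since ISCE products normally carry their dimensions in accompanying metadata:

```python
import numpy as np

width = 1000  # placeholder: take the true width from the product metadata

def load_ion(path, width):
    # Assumption: a single-band float32 raster, one row per line.
    data = np.fromfile(path, dtype=np.float32)
    return data.reshape(-1, width)

date1 = load_ion('ion_dates/date1.ion', width)
date2 = load_ion('ion_dates/date2.ion', width)
pair = load_ion('ion/date1_date2/ion_cal/filt.ion', width)

# Per the relationship quoted above, the pair product should equal the
# difference of the per-date solutions, up to inversion residuals.
print(np.nanmax(np.abs((date1 - date2) - pair)))
```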
@@ -260,6 +260,88 @@ class config(object):
         #self.f.write('ww : 256\n')
         #self.f.write('wh : 128\n')

+    def subband_and_resamp(self, function):
+        self.f.write('##########################' + '\n')
+        self.f.write(function + '\n')
+        self.f.write('subband_and_resamp : ' + '\n')
+        self.f.write('reference : ' + self.reference + '\n')
+        self.f.write('secondary : ' + self.secondary + '\n')
+        self.f.write('coregdir : ' + self.coregdir + '\n')
+        self.f.write('azimuth_misreg : ' + self.azimuth_misreg + '\n')
+        self.f.write('range_misreg : ' + self.range_misreg + '\n')
+
+    def subband(self, function):
+        self.f.write('##########################' + '\n')
+        self.f.write(function + '\n')
+        self.f.write('subband : ' + '\n')
+        self.f.write('directory : ' + self.reference + '\n')
+
+    def generateIgram_ion(self, function):
+        self.f.write('###################################'+'\n')
+        self.f.write(function + '\n')
+        self.f.write('generateIgram : ' + '\n')
+        self.f.write('reference : ' + self.reference + '\n')
+        self.f.write('reference_suffix : ' + self.reference_suffix + '\n')
+        self.f.write('secondary : ' + self.secondary + '\n')
+        self.f.write('secondary_suffix : ' + self.secondary_suffix + '\n')
+        self.f.write('interferogram : ' + self.interferogram + '\n')
+        self.f.write('flatten : ' + self.flatten + '\n')
+        self.f.write('interferogram_prefix : ' + self.interferogram_prefix +'\n')
+        self.f.write('overlap : ' + self.overlap + '\n')
+
+    def mergeBurstsIon(self, function):
+        self.f.write('###################################'+'\n')
+        self.f.write(function + '\n')
+        self.f.write('mergeBurstsIon : ' + '\n')
+        self.f.write('reference : ' + self.reference + '\n')
+        self.f.write('stack : ' + self.stack + '\n')
+        self.f.write('dirname : ' + self.dirname + '\n')
+        self.f.write('name_pattern : ' + self.name_pattern + '\n')
+        self.f.write('outfile : ' + self.outfile + '\n')
+        self.f.write('nrlks : ' + '{}'.format(self.nrlks) + '\n')
+        self.f.write('nalks : ' + '{}'.format(self.nalks) + '\n')
+        self.f.write('nrlks0 : ' + '{}'.format(self.nrlks0) +'\n')
+        self.f.write('nalks0 : ' + '{}'.format(self.nalks0) + '\n')
+        if self.rvalid is not None:
+            self.f.write('rvalid : ' + '{}'.format(self.rvalid) + '\n')
+        if self.swath is not None:
+            self.f.write('swath : ' + self.swath + '\n')
+
+    def coherenceIon(self, function):
+        self.f.write('###################################'+'\n')
+        self.f.write(function + '\n')
+        self.f.write('coherenceIon : ' + '\n')
+        self.f.write('lower : ' + self.lower + '\n')
+        self.f.write('upper : ' + self.upper + '\n')
+        self.f.write('coherence : ' + self.coherence + '\n')
+        if self.nrlks is not None:
+            self.f.write('nrlks : ' + '{}'.format(self.nrlks) + '\n')
+        if self.nalks is not None:
+            self.f.write('nalks : ' + '{}'.format(self.nalks) + '\n')
+
+    def lookUnwIon(self, function):
+        self.f.write('###################################'+'\n')
+        self.f.write(function + '\n')
+        self.f.write('lookUnwIon : ' + '\n')
+        self.f.write('unw : ' + self.unw + '\n')
+        self.f.write('cor : ' + self.cor + '\n')
+        self.f.write('output : ' + self.output + '\n')
+        self.f.write('nrlks : ' + '{}'.format(self.nrlks) + '\n')
+        self.f.write('nalks : ' + '{}'.format(self.nalks) + '\n')
+
+    def filtIon(self, function):
+        self.f.write('###################################'+'\n')
+        self.f.write(function + '\n')
+        self.f.write('filtIon : ' + '\n')
+        self.f.write('input : ' + self.input + '\n')
+        self.f.write('coherence : ' + self.coherence + '\n')
+        self.f.write('output : ' + self.output + '\n')
+        self.f.write('win_min : ' + '{}'.format(self.win_min) + '\n')
+        self.f.write('win_max : ' + '{}'.format(self.win_max) + '\n')
+
     def write_wrapper_config2run_file(self, configName, line_cnt, numProcess = 1):
         # dispassionate list of commands for single process
         if numProcess == 1:
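For orientation, each writer above serializes one processing step into a plain-text block of `key : value` lines in a run-file config, which the stack driver later parses. Based on `subband_and_resamp`, an emitted block would look roughly like the following; the function label and paths are invented:

```
##########################
Function-1
subband_and_resamp : 
reference : reference
secondary : secondarys/20200101
coregdir : coreg_secondarys/20200101
azimuth_misreg : misreg/azimuth.txt
range_misreg : misreg/range.txt
```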
@ -276,6 +358,287 @@ class config(object):
|
|||
self.f.close()
|
||||
|
||||
|
||||
class ionParamUsr(object):
|
||||
'''A class containing parameters for ionosphere estimation specified by user
|
||||
while considerBurstProperties is not availavle for stack processing,
|
||||
ionParam still has parameters associated with considerBurstProperties for bookkeeping.
|
||||
'''
|
||||
|
||||
def __init__(self, usrInput):
|
||||
# usrInput: usrInput txt file
|
||||
self.usrInput = usrInput
|
||||
|
||||
def configure(self):
|
||||
#default values same as topsApp.py
|
||||
#only ION_applyIon is removed, compared with topsApp.py
|
||||
self.ION_doIon = False
|
||||
self.ION_considerBurstProperties = False
|
||||
|
||||
self.ION_ionHeight = 200.0
|
||||
self.ION_ionFit = True
|
||||
self.ION_ionFilteringWinsizeMax = 200
|
||||
self.ION_ionFilteringWinsizeMin = 100
|
||||
self.ION_ionshiftFilteringWinsizeMax = 150
|
||||
self.ION_ionshiftFilteringWinsizeMin = 75
|
||||
self.ION_azshiftFlag = 1
|
||||
|
||||
self.ION_maskedAreas = None
|
||||
|
||||
self.ION_numberAzimuthLooks = 50
|
||||
self.ION_numberRangeLooks = 200
|
||||
self.ION_numberAzimuthLooks0 = 10
|
||||
self.ION_numberRangeLooks0 = 40
|
||||
|
||||
|
||||
#get above parameters from usr input
|
||||
with open(self.usrInput, 'r') as f:
|
||||
lines = f.readlines()
|
||||
|
||||
for x in lines:
|
||||
x = x.strip()
|
||||
if x == '' or x.strip().startswith('#'):
|
||||
continue
|
||||
else:
|
||||
x2 = x.split(':')
|
||||
if 'do ionosphere correction' == x2[0].strip():
|
||||
self.ION_doIon = eval(x2[1].strip().capitalize())
|
||||
if 'consider burst properties in ionosphere computation' == x2[0].strip():
|
||||
self.ION_considerBurstProperties = eval(x2[1].strip().capitalize())
|
||||
|
||||
if 'height of ionosphere layer in km' == x2[0].strip():
|
||||
self.ION_ionHeight = float(x2[1].strip())
|
||||
if 'apply polynomial fit before filtering ionosphere phase' == x2[0].strip():
|
||||
self.ION_ionFit = eval(x2[1].strip().capitalize())
|
||||
if 'maximum window size for filtering ionosphere phase' == x2[0].strip():
|
||||
self.ION_ionFilteringWinsizeMax = int(x2[1].strip())
|
||||
if 'minimum window size for filtering ionosphere phase' == x2[0].strip():
|
||||
self.ION_ionFilteringWinsizeMin = int(x2[1].strip())
|
||||
if 'maximum window size for filtering ionosphere azimuth shift' == x2[0].strip():
|
||||
self.ION_ionshiftFilteringWinsizeMax = int(x2[1].strip())
|
||||
if 'minimum window size for filtering ionosphere azimuth shift' == x2[0].strip():
|
||||
self.ION_ionshiftFilteringWinsizeMin = int(x2[1].strip())
|
||||
if 'correct phase error caused by ionosphere azimuth shift' == x2[0].strip():
|
||||
self.ION_azshiftFlag = int(x2[1].strip())
|
||||
|
||||
if 'areas masked out in ionospheric phase estimation' == x2[0].strip():
|
||||
if x2[1].strip().capitalize() == 'None':
|
||||
self.ION_maskedAreas = None
|
||||
else:
|
||||
self.ION_maskedAreas = []
|
||||
x3 = x2[1].replace('[', '').replace(']', '').split(',')
|
||||
if len(x3)%4 != 0:
|
||||
raise Exception('there must be four elements for each area.')
|
||||
else:
|
||||
narea = int(len(x3)/4)
|
||||
for i in range(narea):
|
||||
self.ION_maskedAreas.append([int(x3[i*4+0].strip()), int(x3[i*4+1].strip()), int(x3[i*4+2].strip()), int(x3[i*4+3].strip())])
|
||||
|
||||
if 'total number of azimuth looks in the ionosphere processing' == x2[0].strip():
|
||||
self.ION_numberAzimuthLooks = int(x2[1].strip())
|
||||
if 'total number of range looks in the ionosphere processing' == x2[0].strip():
|
||||
self.ION_numberRangeLooks = int(x2[1].strip())
|
||||
if 'number of azimuth looks at first stage for ionosphere phase unwrapping' == x2[0].strip():
|
||||
self.ION_numberAzimuthLooks0 = int(x2[1].strip())
|
||||
if 'number of range looks at first stage for ionosphere phase unwrapping' == x2[0].strip():
|
||||
self.ION_numberRangeLooks0 = int(x2[1].strip())
|
||||
|
||||
def print(self):
|
||||
'''print parameters'''
|
||||
|
||||
print()
|
||||
|
||||
print('ionosphere estimation parameters:')
|
||||
print('+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
|
||||
print("do ionosphere correction (ION_doIon): {}".format(self.ION_doIon))
|
||||
print("consider burst properties in ionosphere computation (ION_considerBurstProperties): {}".format(self.ION_considerBurstProperties))
|
||||
|
||||
print("height of ionosphere layer in km (ION_ionHeight): {}".format(self.ION_ionHeight))
|
||||
print("apply polynomial fit before filtering ionosphere phase (ION_ionFit): {}".format(self.ION_ionFit))
|
||||
print("maximum window size for filtering ionosphere phase (ION_ionFilteringWinsizeMax): {}".format(self.ION_ionFilteringWinsizeMax))
|
||||
print("minimum window size for filtering ionosphere phase (ION_ionFilteringWinsizeMin): {}".format(self.ION_ionFilteringWinsizeMin))
|
||||
print("maximum window size for filtering ionosphere azimuth shift (ION_ionshiftFilteringWinsizeMax): {}".format(self.ION_ionshiftFilteringWinsizeMax))
|
||||
print("minimum window size for filtering ionosphere azimuth shift (ION_ionshiftFilteringWinsizeMin): {}".format(self.ION_ionshiftFilteringWinsizeMin))
|
||||
print("correct phase error caused by ionosphere azimuth shift (ION_azshiftFlag): {}".format(self.ION_azshiftFlag))
|
||||
print("areas masked out in ionospheric phase estimation (ION_maskedAreas): {}".format(self.ION_maskedAreas))
|
||||
|
||||
print("total number of azimuth looks in the ionosphere processing (ION_numberAzimuthLooks): {}".format(self.ION_numberAzimuthLooks))
|
||||
print("total number of range looks in the ionosphere processing (ION_numberRangeLooks): {}".format(self.ION_numberRangeLooks))
|
||||
print("number of azimuth looks at first stage for ionosphere phase unwrapping (ION_numberAzimuthLooks0): {}".format(self.ION_numberAzimuthLooks0))
|
||||
print("number of range looks at first stage for ionosphere phase unwrapping (ION_numberRangeLooks0): {}".format(self.ION_numberRangeLooks0))
|
||||
|
||||
print()
|
||||
|
||||
|
||||
class ionParam(object):
|
||||
'''A class containing parameters for ionosphere estimation
|
||||
while considerBurstProperties is not availavle for stack processing,
|
||||
ionParam still has parameters associated with considerBurstProperties for bookkeeping.
|
||||
'''
|
||||
|
||||
def __init__(self, usrInput=None, safeObjFirst=None, safeObjSecondary=None):
|
||||
# usrInput: usrInput parameter object
|
||||
# safeObjFirst: sentinelSLC object defined in Stack.py of first date
|
||||
# safeObjSecond: sentinelSLC object defined in Stack.py of second date
|
||||
self.usrInput = usrInput
|
||||
self.safeObjFirst = safeObjFirst
|
||||
self.safeObjSecondary = safeObjSecondary
|
||||
|
||||
def configure(self):
|
||||
#all paramters have default values, update the relevant parameters using
|
||||
#self.usrInput, self.safeObjFirst, self.safeObjSecondary
|
||||
#when they are not None
|
||||
|
||||
###################################################################
|
||||
#users are supposed to change parameters of this section ONLY
|
||||
#SECTION 1. PROCESSING CONTROL PARAMETERS
|
||||
#1. suggested default values of the parameters
|
||||
self.doIon = False
|
||||
self.considerBurstProperties = False
|
||||
|
||||
#ionospheric layer height (m)
|
||||
self.ionHeight = 200.0 * 1000.0
|
||||
#before filtering ionosphere, if applying polynomial fitting
|
||||
#False: no fitting
|
||||
#True: with fitting
|
||||
self.ionFit = True
|
||||
#window size for filtering ionosphere
|
||||
self.ionFilteringWinsizeMax = 200
|
||||
self.ionFilteringWinsizeMin = 100
|
||||
#window size for filtering azimuth shift caused by ionosphere
|
||||
self.ionshiftFilteringWinsizeMax = 150
|
||||
self.ionshiftFilteringWinsizeMin = 75
|
||||
#correct phase error caused by non-zero center frequency and azimuth shift caused by ionosphere
|
||||
#0: no correction
|
||||
#1: use mean value of a burst
|
||||
#2: use full burst
|
||||
self.azshiftFlag = 1
|
||||
self.maskedAreas = None
|
||||
|
||||
#better NOT try changing the following two parameters, since they are related
|
||||
#to the filtering parameters above
|
||||
#number of azimuth looks in the processing of ionosphere estimation
|
||||
self.numberAzimuthLooks = 50
|
||||
#number of range looks in the processing of ionosphere estimation
|
||||
self.numberRangeLooks = 200
|
||||
#number of azimuth looks of the interferogram to be unwrapped
|
||||
self.numberAzimuthLooks0 = 5*2
|
||||
#number of range looks of the interferogram to be unwrapped
|
||||
self.numberRangeLooks0 = 20*2
|
||||

        #2. accept the above parameters from topsApp.py
        if self.usrInput is not None:
            self.doIon = self.usrInput.ION_doIon
            self.considerBurstProperties = self.usrInput.ION_considerBurstProperties

            self.ionHeight = self.usrInput.ION_ionHeight * 1000.0
            self.ionFit = self.usrInput.ION_ionFit
            self.ionFilteringWinsizeMax = self.usrInput.ION_ionFilteringWinsizeMax
            self.ionFilteringWinsizeMin = self.usrInput.ION_ionFilteringWinsizeMin
            self.ionshiftFilteringWinsizeMax = self.usrInput.ION_ionshiftFilteringWinsizeMax
            self.ionshiftFilteringWinsizeMin = self.usrInput.ION_ionshiftFilteringWinsizeMin
            self.azshiftFlag = self.usrInput.ION_azshiftFlag
            self.maskedAreas = self.usrInput.ION_maskedAreas

            self.numberAzimuthLooks = self.usrInput.ION_numberAzimuthLooks
            self.numberRangeLooks = self.usrInput.ION_numberRangeLooks
            self.numberAzimuthLooks0 = self.usrInput.ION_numberAzimuthLooks0
            self.numberRangeLooks0 = self.usrInput.ION_numberRangeLooks0

        #3. check parameters
        #check number of looks
        if not ((self.numberAzimuthLooks % self.numberAzimuthLooks0 == 0) and \
           (1 <= self.numberAzimuthLooks0 <= self.numberAzimuthLooks)):
            raise Exception('numberAzimuthLooks must be an integer multiple of numberAzimuthLooks0')
        if not ((self.numberRangeLooks % self.numberRangeLooks0 == 0) and \
           (1 <= self.numberRangeLooks0 <= self.numberRangeLooks)):
            raise Exception('numberRangeLooks must be an integer multiple of numberRangeLooks0')
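        #illustration only (not part of the original code): the defaults pass this check,
        #since 50 % 10 == 0 and 1 <= 10 <= 50 in azimuth, and 200 % 40 == 0 and
        #1 <= 40 <= 200 in range; a value such as numberAzimuthLooks0 = 15 would raise,
        #because 50 % 15 != 0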
        ###################################################################


        #SECTION 2. DIRECTORIES AND FILENAMES
        #directories
        self.ionDirname = 'ion'
        self.lowerDirname = 'lower'
        self.upperDirname = 'upper'
        self.ioncalDirname = 'ion_cal'
        self.ionBurstDirname = 'ion_burst'
        #these are the same directory names as in topsApp.py/TopsProc.py
        #self.referenceSlcProduct = 'reference'
        #self.secondarySlcProduct = 'secondary'
        #self.fineCoregDirname = 'fine_coreg'
        self.fineIfgDirname = 'fine_interferogram'
        self.mergedDirname = 'merged'
        #filenames
        self.ionRawNoProj = 'raw_no_projection.ion'
        self.ionCorNoProj = 'raw_no_projection.cor'
        self.ionRaw = 'raw.ion'
        self.ionCor = 'raw.cor'
        self.ionFilt = 'filt.ion'
        self.ionShift = 'azshift.ion'
        self.warning = 'warning.txt'

        #SECTION 3. DATA PARAMETERS
        #earth's radius (m)
        self.earthRadius = 6371 * 1000.0
        #reference range (m) for moving range center frequency to zero, center of center swath
        self.rgRef = 875714.0
        #range bandwidth (Hz) for splitting; range processingBandwidth: [5.650000000000000e+07, 4.830000000000000e+07, 4.278991840322842e+07]
        self.rgBandwidthForSplit = 40.0 * 10**6
        self.rgBandwidthSub = self.rgBandwidthForSplit / 3.0

        #SECTION 4. DEFINE WAVELENGTHS AND DETERMINE IF CALCULATE IONOSPHERE WITH MERGED INTERFEROGRAM
        #Sentinel-1A/B radar wavelengths are the same.
        self.radarWavelength = 0.05546576
        self.passDirection = None

        #self.safeObjFirst and self.safeObjSecondary should already have these parameters
        #use the 1/3, 1/3, 1/3 scheme for splitting
        from isceobj.Constants import SPEED_OF_LIGHT
        if self.safeObjFirst is not None:
            #use this to determine which polynomial to use to calculate a ramp when calculating ionosphere for cross A/B interferogram
            self.passDirection = self.safeObjFirst.passDirection.lower()
            self.radarWavelength = self.safeObjFirst.radarWavelength
        self.radarWavelengthLower = SPEED_OF_LIGHT / (SPEED_OF_LIGHT / self.radarWavelength - self.rgBandwidthForSplit / 3.0)
        self.radarWavelengthUpper = SPEED_OF_LIGHT / (SPEED_OF_LIGHT / self.radarWavelength + self.rgBandwidthForSplit / 3.0)
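        #worked example (illustration only, not part of the original code): with the
        #default wavelength 0.05546576 m (center frequency f0 ~ 5.405 GHz) and the
        #40 MHz split bandwidth, the subband center frequencies are f0 -/+ 13.33 MHz,
        #giving radarWavelengthLower ~ 0.05560 m and radarWavelengthUpper ~ 0.05533 m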

        self.calIonWithMerged = False
        self.rampRemovel = 0
        #update the above two parameters depending on self.safeObjFirst and self.safeObjSecondary
        if (self.safeObjFirst is not None) and (self.safeObjSecondary is not None):
            #determine if ionosphere is calculated using the merged interferogram
            #check if the needed parameters are already available
            if not hasattr(self.safeObjFirst, 'startingRanges'):
                self.safeObjFirst.get_starting_ranges()
            if not hasattr(self.safeObjSecondary, 'startingRanges'):
                self.safeObjSecondary.get_starting_ranges()
            if self.safeObjFirst.startingRanges == self.safeObjSecondary.startingRanges:
                self.calIonWithMerged = False
            else:
                self.calIonWithMerged = True
            #for a cross Sentinel-1A/B interferogram, never use the merged interferogram
            if self.safeObjFirst.platform != self.safeObjSecondary.platform:
                self.calIonWithMerged = False
            #there is no need to process swath by swath when there is only one swath
            #ionSwathBySwath only works when number of swaths >= 2
            #CONSIDER THIS LATER!!!
            #if len(swathList) == 1:
            #    self.calIonWithMerged = True

            #determine if an empirical ramp should be removed
            if self.safeObjFirst.platform == self.safeObjSecondary.platform:
                self.rampRemovel = 0
            else:
                #estimating ionospheric phase for a cross Sentinel-1A/B interferogram;
                #an empirical ramp will be removed from the estimated ionospheric phase
                if self.safeObjFirst.platform == 'S1A' and self.safeObjSecondary.platform == 'S1B':
                    self.rampRemovel = 1
                else:
                    self.rampRemovel = -1


class run(object):
    """
    A class representing a run which may contain several functions

@ -795,6 +1158,390 @@ class run(object):
        line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt)
        del configObj


    def subband_and_resamp(self, dateListIon, stackReferenceDate):

        line_cnt = 0
        for date in dateListIon:
            configName = os.path.join(self.config_path, 'config_subband_and_resamp_{}'.format(date))
            configObj = config(configName)
            configObj.configure(self)
            configObj.reference = os.path.join(self.work_dir, 'reference')
            configObj.secondary = os.path.join(self.work_dir, 'secondarys', date)
            configObj.coregdir = os.path.join(self.work_dir, 'coreg_secondarys', date)
            configObj.azimuth_misreg = os.path.join(self.work_dir, 'misreg', 'azimuth', 'dates', '{}.txt'.format(date))
            configObj.range_misreg = os.path.join(self.work_dir, 'misreg', 'range', 'dates', '{}.txt'.format(date))
            if date == stackReferenceDate:
                configObj.subband('[Function-1]')
            else:
                configObj.subband_and_resamp('[Function-1]')
            configObj.finalize()

            line_cnt += 1
            line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess)
            del configObj


    def generateIgram_ion(self, pairs, stackReferenceDate):

        line_cnt = 0
        for p in pairs:
            configName = os.path.join(self.config_path, 'config_generateIgram_ion_{}_{}'.format(p[0], p[1]))
            configObj = config(configName)
            configObj.configure(self)
            if p[0] == stackReferenceDate:
                configObj.reference = os.path.join(self.work_dir, 'reference')
            else:
                configObj.reference = os.path.join(self.work_dir, 'coreg_secondarys', p[0])
            if p[1] == stackReferenceDate:
                configObj.secondary = os.path.join(self.work_dir, 'reference')
            else:
                configObj.secondary = os.path.join(self.work_dir, 'coreg_secondarys', p[1])
            configObj.flatten = 'False'
            configObj.interferogram_prefix = 'fine'
            configObj.overlap = 'False'

            configObj.reference_suffix = '_lower'
            configObj.secondary_suffix = '_lower'
            configObj.interferogram = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'interferograms')
            configObj.generateIgram_ion('[Function-1]')

            configObj.reference_suffix = '_upper'
            configObj.secondary_suffix = '_upper'
            configObj.interferogram = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'upper', 'interferograms')
            configObj.generateIgram_ion('[Function-2]')
            configObj.finalize()

            line_cnt += 1
            line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess)
            del configObj


    def mergeBurstsIon(self, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update):
        import numpy as np

        swath_list = sorted(self.swath_num.split())
        nswath = len(swath_list)

        ionParamUsrObj = ionParamUsr(self.param_ion)
        ionParamUsrObj.configure()

        if nswath == 1:
            pairs1 = pairs_same_starting_ranges_update + pairs_diff_starting_ranges_update
            pairs2 = []
        else:
            pairs1 = pairs_same_starting_ranges_update
            pairs2 = pairs_diff_starting_ranges_update

        line_cnt = 0
        for p in pairs1+pairs2:
            configName = os.path.join(self.config_path, 'config_mergeBurstsIon_{}-{}'.format(p[0], p[1]))
            configObj = config(configName)
            configObj.configure(self)

            if p in pairs1:
                for subband, function in zip(['lower', 'upper'], ['[Function-1]', '[Function-2]']):
                    configObj.reference = os.path.join(self.work_dir, 'reference')
                    configObj.stack = os.path.join(self.work_dir, 'stack')
                    configObj.dirname = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'interferograms')
                    configObj.name_pattern = 'fine_*.int'
                    configObj.outfile = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged', 'fine.int')
                    configObj.nrlks = ionParamUsrObj.ION_numberRangeLooks
                    configObj.nalks = ionParamUsrObj.ION_numberAzimuthLooks
                    configObj.nrlks0 = ionParamUsrObj.ION_numberRangeLooks0
                    configObj.nalks0 = ionParamUsrObj.ION_numberAzimuthLooks0
                    configObj.rvalid = None
                    configObj.swath = None
                    configObj.mergeBurstsIon(function)

                configObj.lower = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged', 'fine.int')
                configObj.upper = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'upper', 'merged', 'fine.int')
                configObj.coherence = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged', 'fine.cor')
                configObj.nrlks = None
                configObj.nalks = None
                configObj.coherenceIon('[Function-3]')

            if p in pairs2:
                num = 2 * nswath
                subbandAll = ['lower' for i in range(nswath)] + ['upper' for i in range(nswath)]
                swathAll = swath_list + swath_list
                functionAll = ['[Function-{}]'.format(i+1) for i in range(num)]
                for subband, swath, function in zip(subbandAll, swathAll, functionAll):
                    configObj.reference = os.path.join(self.work_dir, 'reference')
                    configObj.stack = os.path.join(self.work_dir, 'stack')
                    configObj.dirname = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'interferograms')
                    configObj.name_pattern = 'fine_*.int'
                    configObj.outfile = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged_IW{}'.format(swath), 'fine.int')
                    configObj.nrlks = ionParamUsrObj.ION_numberRangeLooks
                    configObj.nalks = ionParamUsrObj.ION_numberAzimuthLooks
                    configObj.nrlks0 = ionParamUsrObj.ION_numberRangeLooks0
                    configObj.nalks0 = ionParamUsrObj.ION_numberAzimuthLooks0
                    configObj.rvalid = np.int32(np.around(ionParamUsrObj.ION_numberRangeLooks/8.0))
                    configObj.swath = swath
                    configObj.mergeBurstsIon(function)

                swathAll = swath_list
                functionAll = ['[Function-{}]'.format(num+i+1) for i in range(nswath)]
                for swath, function in zip(swathAll, functionAll):
                    configObj.lower = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged_IW{}'.format(swath), 'fine.int')
                    configObj.upper = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'upper', 'merged_IW{}'.format(swath), 'fine.int')
                    configObj.coherence = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged_IW{}'.format(swath), 'fine.cor')
                    configObj.nrlks = None
                    configObj.nalks = None
                    configObj.coherenceIon(function)

            configObj.finalize()

            line_cnt += 1
            #line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess)
            line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt)
            del configObj


    def unwrap_ion(self, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update):
        import numpy as np

        swath_list = sorted(self.swath_num.split())
        nswath = len(swath_list)

        ionParamUsrObj = ionParamUsr(self.param_ion)
        ionParamUsrObj.configure()

        if nswath == 1:
            pairs1 = pairs_same_starting_ranges_update + pairs_diff_starting_ranges_update
            pairs2 = []
        else:
            pairs1 = pairs_same_starting_ranges_update
            pairs2 = pairs_diff_starting_ranges_update

        line_cnt = 0
        for p in pairs1+pairs2:
            configName = os.path.join(self.config_path, 'config_unwrap_ion_{}-{}'.format(p[0], p[1]))
            configObj = config(configName)
            configObj.configure(self)

            if p in pairs1:
                for subband, function in zip(['lower', 'upper'], ['[Function-1]', '[Function-2]']):
                    configObj.ifgName = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged', 'fine.int')
                    configObj.unwName = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged', 'fine.unw')
                    configObj.cohName = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged', 'fine.cor')
                    configObj.noMCF = 'False'
                    configObj.reference = os.path.join(self.work_dir, 'reference')
                    configObj.defoMax = '2'
                    configObj.rangeLooks = '{}'.format(ionParamUsrObj.ION_numberRangeLooks0)
                    configObj.azimuthLooks = '{}'.format(ionParamUsrObj.ION_numberAzimuthLooks0)
                    configObj.rmfilter = False
                    configObj.unwMethod = 'snaphu'
                    configObj.unwrap(function)

            if p in pairs2:
                num = 2 * nswath
                subbandAll = ['lower' for i in range(nswath)] + ['upper' for i in range(nswath)]
                swathAll = swath_list + swath_list
                functionAll = ['[Function-{}]'.format(i+1) for i in range(num)]
                for subband, swath, function in zip(subbandAll, swathAll, functionAll):
                    configObj.ifgName = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged_IW{}'.format(swath), 'fine.int')
                    configObj.unwName = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged_IW{}'.format(swath), 'fine.unw')
                    configObj.cohName = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged_IW{}'.format(swath), 'fine.cor')
                    configObj.noMCF = 'False'
                    configObj.reference = os.path.join(self.work_dir, 'reference')
                    configObj.defoMax = '2'
                    configObj.rangeLooks = '{}'.format(ionParamUsrObj.ION_numberRangeLooks0)
                    configObj.azimuthLooks = '{}'.format(ionParamUsrObj.ION_numberAzimuthLooks0)
                    configObj.rmfilter = False
                    configObj.unwMethod = 'snaphu'
                    configObj.unwrap(function)

            configObj.finalize()

            line_cnt += 1
            #line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess)
            line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt)
            del configObj


    def look_ion(self, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update):
        import numpy as np

        swath_list = sorted(self.swath_num.split())
        nswath = len(swath_list)

        ionParamUsrObj = ionParamUsr(self.param_ion)
        ionParamUsrObj.configure()

        if nswath == 1:
            pairs1 = pairs_same_starting_ranges_update + pairs_diff_starting_ranges_update
            pairs2 = []
        else:
            pairs1 = pairs_same_starting_ranges_update
            pairs2 = pairs_diff_starting_ranges_update

        line_cnt = 0
        for p in pairs1+pairs2:
            configName = os.path.join(self.config_path, 'config_look_ion_{}-{}'.format(p[0], p[1]))
            configObj = config(configName)
            configObj.configure(self)

            if p in pairs1:
                for subband, function in zip(['lower', 'upper'], ['[Function-1]', '[Function-2]']):
                    configObj.unw = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged', 'fine.unw')
                    configObj.cor = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged', 'fine.cor')
                    configObj.output = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged', 'fine_look.unw')
                    configObj.nrlks = np.int32(np.around(ionParamUsrObj.ION_numberRangeLooks / ionParamUsrObj.ION_numberRangeLooks0))
                    configObj.nalks = np.int32(np.around(ionParamUsrObj.ION_numberAzimuthLooks / ionParamUsrObj.ION_numberAzimuthLooks0))
                    configObj.lookUnwIon(function)

                configObj.lower = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged', 'fine.int')
                configObj.upper = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'upper', 'merged', 'fine.int')
                configObj.coherence = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged', 'fine_look.cor')
                #configObj.nrlks = np.int32(np.around(ionParamUsrObj.ION_numberRangeLooks / ionParamUsrObj.ION_numberRangeLooks0))
                #configObj.nalks = np.int32(np.around(ionParamUsrObj.ION_numberAzimuthLooks / ionParamUsrObj.ION_numberAzimuthLooks0))
                configObj.coherenceIon('[Function-3]')

            if p in pairs2:
                num = 2 * nswath
                subbandAll = ['lower' for i in range(nswath)] + ['upper' for i in range(nswath)]
                swathAll = swath_list + swath_list
                functionAll = ['[Function-{}]'.format(i+1) for i in range(num)]
                for subband, swath, function in zip(subbandAll, swathAll, functionAll):
                    configObj.unw = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged_IW{}'.format(swath), 'fine.unw')
                    configObj.cor = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged_IW{}'.format(swath), 'fine.cor')
                    configObj.output = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), '{}'.format(subband), 'merged_IW{}'.format(swath), 'fine_look.unw')
                    configObj.nrlks = np.int32(np.around(ionParamUsrObj.ION_numberRangeLooks / ionParamUsrObj.ION_numberRangeLooks0))
                    configObj.nalks = np.int32(np.around(ionParamUsrObj.ION_numberAzimuthLooks / ionParamUsrObj.ION_numberAzimuthLooks0))
                    configObj.lookUnwIon(function)

                swathAll = swath_list
                functionAll = ['[Function-{}]'.format(num+i+1) for i in range(nswath)]
                for swath, function in zip(swathAll, functionAll):
                    configObj.lower = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged_IW{}'.format(swath), 'fine.int')
                    configObj.upper = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'upper', 'merged_IW{}'.format(swath), 'fine.int')
                    configObj.coherence = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged_IW{}'.format(swath), 'fine_look.cor')
                    #configObj.nrlks = np.int32(np.around(ionParamUsrObj.ION_numberRangeLooks / ionParamUsrObj.ION_numberRangeLooks0))
                    #configObj.nalks = np.int32(np.around(ionParamUsrObj.ION_numberAzimuthLooks / ionParamUsrObj.ION_numberAzimuthLooks0))
                    configObj.coherenceIon(function)

            configObj.finalize()

            line_cnt += 1
            #line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess)
            line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt)
            del configObj


    def computeIon(self, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict):
        import numpy as np

        swath_list = sorted(self.swath_num.split())
        nswath = len(swath_list)

        ionParamUsrObj = ionParamUsr(self.param_ion)
        ionParamUsrObj.configure()

        if nswath == 1:
            pairs1 = pairs_same_starting_ranges_update + pairs_diff_starting_ranges_update
            pairs2 = []
        else:
            pairs1 = pairs_same_starting_ranges_update
            pairs2 = pairs_diff_starting_ranges_update

        for p in pairs1+pairs2:
            ionParamObj = ionParam(usrInput=ionParamUsrObj, safeObjFirst=safe_dict[p[0]], safeObjSecondary=safe_dict[p[1]])
            ionParamObj.configure()

            #do not use SentinelWrapper.py, as it does not support the 2-d list masked_areas
            #configName = os.path.join(self.config_path,'config_mergeBurstsIon_{}-{}'.format(p[0], p[1]))
            #configObj = config(configName)
            #configObj.configure(self)

            if p in pairs1:
                lower = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged', 'fine_look.unw')
                upper = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'upper', 'merged', 'fine_look.unw')
                coherence = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged', 'fine_look.cor')
                ionosphere = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal', 'raw_no_projection.ion')
                coherence_output = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal', 'raw_no_projection.cor')
                masked_areas = ''
                if ionParamObj.maskedAreas is not None:
                    for m in ionParamObj.maskedAreas:
                        masked_areas += ' --masked_areas {} {} {} {}'.format(m[0], m[1], m[2], m[3])

                cmd = 'computeIon.py --lower {} --upper {} --coherence {} --ionosphere {} --coherence_output {}{}'.format(
                    lower, upper, coherence, ionosphere, coherence_output, masked_areas)

                self.runf.write(self.text_cmd + cmd + '\n')
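                #illustration only (hypothetical dates, not part of the original code): for a
                #pair 20200101_20200113 this writes a run-file line like
                #"computeIon.py --lower <work_dir>/ion/20200101_20200113/lower/merged/fine_look.unw
                # --upper <work_dir>/ion/20200101_20200113/upper/merged/fine_look.unw ...",
                #plus one "--masked_areas firstLine lastLine firstColumn lastColumn" group per masked area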

            if p in pairs2:
                for swath in swath_list:
                    lower = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged_IW{}'.format(swath), 'fine_look.unw')
                    upper = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'upper', 'merged_IW{}'.format(swath), 'fine_look.unw')
                    coherence = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'lower', 'merged_IW{}'.format(swath), 'fine_look.cor')
                    ionosphere = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal_IW{}'.format(swath), 'raw_no_projection.ion')
                    coherence_output = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal_IW{}'.format(swath), 'raw_no_projection.cor')
                    masked_areas = ''
                    if ionParamObj.maskedAreas is not None:
                        for m in ionParamObj.maskedAreas:
                            masked_areas += ' --masked_areas {} {} {} {}'.format(m[0], m[1], m[2], m[3])

                    cmd = 'computeIon.py --lower {} --upper {} --coherence {} --ionosphere {} --coherence_output {}{}'.format(
                        lower, upper, coherence, ionosphere, coherence_output, masked_areas)

                    self.runf.write(self.text_cmd + cmd + '\n')

                #merge swaths
                reference = os.path.join(self.work_dir, 'reference')
                stack = os.path.join(self.work_dir, 'stack')
                input0 = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]))
                output = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal')
                nrlks = ionParamObj.numberRangeLooks
                nalks = ionParamObj.numberAzimuthLooks
                remove_ramp = ionParamObj.rampRemovel

                cmd = 'mergeSwathIon.py --reference {} --stack {} --input {} --output {} --nrlks {} --nalks {} --remove_ramp {}'.format(
                    reference, stack, input0, output, nrlks, nalks, remove_ramp)
                self.runf.write(self.text_cmd + cmd + '\n')


    def filtIon(self, pairs):

        ionParamUsrObj = ionParamUsr(self.param_ion)
        ionParamUsrObj.configure()

        line_cnt = 0
        for p in pairs:
            configName = os.path.join(self.config_path, 'config_filtIon_{}_{}'.format(p[0], p[1]))
            configObj = config(configName)
            configObj.configure(self)
            configObj.input = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal', 'raw_no_projection.ion')
            configObj.coherence = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal', 'raw_no_projection.cor')
            configObj.output = os.path.join(self.work_dir, 'ion', '{}_{}'.format(p[0], p[1]), 'ion_cal', 'filt.ion')
            configObj.win_min = ionParamUsrObj.ION_ionFilteringWinsizeMin
            configObj.win_max = ionParamUsrObj.ION_ionFilteringWinsizeMax
            configObj.filtIon('[Function-1]')
            configObj.finalize()

            line_cnt += 1
            #line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt, self.numProcess)
            line_cnt = configObj.write_wrapper_config2run_file(configName, line_cnt)
            del configObj


    def invertIon(self):

        ionParamUsrObj = ionParamUsr(self.param_ion)
        ionParamUsrObj.configure()

        ion_in = os.path.join(self.work_dir, 'ion')
        ion_out = os.path.join(self.work_dir, 'ion_dates')
        hgt = os.path.join(self.work_dir, 'merged/geom_reference/hgt.rdr')

        cmd = 'invertIon.py --idir {} --odir {} --nrlks1 {} --nalks1 {} --nrlks2 {} --nalks2 {} --merged_geom {} --interp --msk_overlap'.format(
            ion_in, ion_out, ionParamUsrObj.ION_numberRangeLooks, ionParamUsrObj.ION_numberAzimuthLooks, self.rangeLooks, self.azimuthLooks, hgt)

        self.runf.write(self.text_cmd + cmd + '\n')


    def finalize(self):
        self.runf.close()
        #writeJobFile(self.run_outname)

@ -837,6 +1584,35 @@ class sentinelSLC(object):
        self.SNWE=[min(lats),max(lats),min(lons),max(lons)]


    def get_starting_ranges(self, safe=None):
        import zipfile
        import xml.etree.ElementTree as ET
        from isceobj.Planet.AstronomicalHandbook import Const

        #if the safe file is not set, use the first slice in the safe list sorted by starting time
        if safe is None:
            safe = sorted(self.safe_file.split(), key=lambda x: x.split('_')[-5], reverse=False)[0]
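            #note (added for clarity): SAFE names are underscore-delimited and field -5 is
            #the acquisition start time (e.g. S1A_IW_SLC__1SDV_<start>_<stop>_..., where the
            #double underscore yields an empty field), so this picks the earliest slice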
        zf = zipfile.ZipFile(safe, 'r')
        anna = sorted([item for item in zf.namelist() if '.SAFE/annotation/s1' in item])
        #dual polarization: for the same swath, the slant ranges of the two polarizations should be the same
        if len(anna) == 6:
            anna = anna[1:6:2]

        startingRange = []
        for k in range(3):
            xmlstr = zf.read(anna[k])
            root = ET.fromstring(xmlstr)
            #startingRange computation exactly the same as in components/isceobj/Sensor/TOPS/Sentinel1.py used
            #by topsApp.py and topsStack
            startingRange.append(
                float(root.find('imageAnnotation/imageInformation/slantRangeTime').text)*Const.c/2.0
            )

            if k == 0:
                self.radarWavelength = Const.c / float(root.find('generalAnnotation/productInformation/radarFrequency').text)
                self.passDirection = root.find('generalAnnotation/productInformation/pass').text

        self.startingRanges = startingRange


    def getkmlQUAD(self,safe):

@ -1033,7 +1809,7 @@ class sentinelSLC(object):
            orbit_start_date_time = datetime.datetime.strptime(fields[6].replace('V',''), datefmt)
            orbit_stop_date_time = datetime.datetime.strptime(fields[7].replace('.EOF',''), datefmt)
            if self.start_date_time >= orbit_start_date_time and self.stop_date_time < orbit_stop_date_time:
                print("restituted orbit already exists.")
                print("restituted or precise orbit already exists.")
                self.orbit = orbitFile
                self.orbitType = 'restituted'

@ -0,0 +1,80 @@
#!/usr/bin/env python3

import os
import copy
import argparse
import numpy as np

import isce
import isceobj
import s1a_isce_utils as ut
from isceobj.TopsProc.runMergeBursts import mergeBox
from isceobj.TopsProc.runMergeBursts import adjustValidWithLooks


def createParser():
    parser = argparse.ArgumentParser(description='adjust valid samples by considering number of looks')

    parser.add_argument('-i', '--input', dest='input', type=str, required=True,
            help='Directory with input acquisition')
    parser.add_argument('-o', '--output', dest='output', type=str, required=True,
            help='Directory with output')
    parser.add_argument('-r', '--nrlks', dest='nrlks', type=int, default=1,
            help='Number of range looks. Default: 1')
    parser.add_argument('-a', '--nalks', dest='nalks', type=int, default=1,
            help='Number of azimuth looks. Default: 1')

    return parser

def cmdLineParse(iargs = None):
    parser = createParser()
    return parser.parse_args(args=iargs)


def main(iargs=None):
    '''
    adjust valid samples by considering number of looks
    '''
    inps = cmdLineParse(iargs)

    swathList = sorted(ut.getSwathList(inps.input))

    frames = []
    swaths = []
    for swath in swathList:
        frame = ut.loadProduct(os.path.join(inps.input, 'IW{0}.xml'.format(swath)))
        minBurst = frame.bursts[0].burstNumber
        maxBurst = frame.bursts[-1].burstNumber

        if minBurst == maxBurst:
            print('Skipping processing of swath {0}'.format(swath))
            continue

        frames.append(frame)
        swaths.append(swath)

    if inps.nrlks != 1 or inps.nalks != 1:
        print('updating swath xml')
        box = mergeBox(frames)
        #adjust valid with looks, 'frames' ARE CHANGED AFTER RUNNING THIS
        #here numberRangeLooks, instead of numberRangeLooks0, is used, since we need to do next-step multilooking after unwrapping. same for numberAzimuthLooks.
        (burstValidBox, burstValidBox2, message) = adjustValidWithLooks(frames, box, inps.nalks, inps.nrlks, edge=0, avalid='strict', rvalid='strict')
    else:
        print('number of range and azimuth looks are both equal to 1, no need to update swath xml')

    #pair each kept frame with its swath number; frames can be shorter than swathList
    #when a swath is skipped above, so indexing frames by swath number would be wrong
    for swath, frame in zip(swaths, frames):
        print('writing ', os.path.join(inps.output, 'IW{0}.xml'.format(swath)))
        os.makedirs(os.path.join(inps.output, 'IW{0}'.format(swath)), exist_ok=True)
        ut.saveProduct(frame, os.path.join(inps.output, 'IW{0}.xml'.format(swath)))


if __name__ == '__main__':
    '''
    Main driver.
    '''
    # Main Driver
    main()


@ -0,0 +1,185 @@
#!/usr/bin/env python3

# Author: Cunren Liang
# Copyright 2021

import os
import copy
import glob
import shutil
import argparse
import numpy as np

import isce
import isceobj
import s1a_isce_utils as ut
from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct
from isceobj.TopsProc.runIon import renameFile


def createParser():
    parser = argparse.ArgumentParser(description='check overlap among all acquisitions')
    parser.add_argument('-r', '--reference', dest='reference', type=str, required=True,
            help='directory with reference acquisition')
    parser.add_argument('-s', '--secondarys', dest='secondarys', type=str, required=True,
            help='directory with secondary acquisitions')
    parser.add_argument('-g', '--geom_reference', dest='geom_reference', type=str, default=None,
            help='directory with geometry of reference')

    return parser


def cmdLineParse(iargs = None):
    parser = createParser()
    return parser.parse_args(args=iargs)


def main(iargs=None):
    '''
    check overlap among all acquisitions, keep only the bursts that are in the common overlap,
    and then renumber the bursts.
    '''
    inps = cmdLineParse(iargs)

    referenceDir = inps.reference
    secondaryDir = sorted(glob.glob(os.path.join(inps.secondarys, '*')))

    acquistionDir = [referenceDir] + secondaryDir

    invalidSwath = []
    for i in [1, 2, 3]:
        for x in acquistionDir:
            if not (os.path.isdir(os.path.join(x, 'IW{}'.format(i))) and os.path.isfile(os.path.join(x, 'IW{}.xml'.format(i)))):
                invalidSwath.append(i)
                break

    if invalidSwath == [1, 2, 3]:
        raise Exception('there are no common swaths among the acquisitions')
    else:
        validSwath = [i for i in [1, 2, 3] if i not in invalidSwath]
        print('valid swaths from scanning acquisition directories: {}'.format(validSwath))

    invalidSwath2 = []
    for swath in validSwath:
        referenceSwath = ut.loadProduct(os.path.join(referenceDir, 'IW{0}.xml'.format(swath)))

        burstoffsetAll = []
        minBurstAll = []
        maxBurstAll = []
        secondarySwathAll = []
        for secondaryDirX in secondaryDir:
            secondarySwath = ut.loadProduct(os.path.join(secondaryDirX, 'IW{0}.xml'.format(swath)))

            secondarySwathAll.append(secondarySwath)

            burstoffset, minBurst, maxBurst = referenceSwath.getCommonBurstLimits(secondarySwath)
            burstoffsetAll.append(burstoffset)
            minBurstAll.append(minBurst)
            maxBurstAll.append(maxBurst)

        minBurst = max(minBurstAll)
        maxBurst = min(maxBurstAll)

        numBurst = maxBurst - minBurst

        if minBurst >= maxBurst:
            invalidSwath2.append(swath)
        else:
            #add reference
            swathAll = [referenceSwath] + secondarySwathAll
            burstoffsetAll = [0] + burstoffsetAll

            for dirx, swathx, burstoffsetx in zip(acquistionDir, swathAll, burstoffsetAll):

                swathTmp = createTOPSSwathSLCProduct()
                swathTmp.configure()

                #change reserved burst properties and remove non-overlap bursts
                for jj in range(len(swathx.bursts)):
                    ii = jj - burstoffsetx
                    #burstFileName = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), os.path.basename(swathx.bursts[jj].image.filename))
                    burstFileName = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), 'burst_%02d'%(jj+1) + '.slc')
                    if minBurst <= ii < maxBurst:
                        kk = ii - minBurst
                        #change burst properties
                        swathx.bursts[jj].burstNumber = kk + 1
                        swathx.bursts[jj].image.filename = os.path.join(os.path.dirname(swathx.bursts[jj].image.filename), 'burst_%02d'%(kk+1) + '.slc')
                        swathTmp.bursts.append(swathx.bursts[jj])
                    else:
                        #remove non-overlap bursts
                        #os.remove(burstFileName)
                        os.remove(burstFileName+'.vrt')
                        os.remove(burstFileName+'.xml')
                        #remove geometry files accordingly if provided
                        if dirx == referenceDir:
                            if inps.geom_reference is not None:
                                for fileType in ['hgt', 'incLocal', 'lat', 'lon', 'los', 'shadowMask']:
                                    geomFileName = os.path.join(os.path.abspath(inps.geom_reference), 'IW{}'.format(swath), fileType + '_%02d'%(jj+1) + '.rdr')
                                    os.remove(geomFileName)
                                    os.remove(geomFileName+'.vrt')
                                    os.remove(geomFileName+'.xml')

                #change reserved burst file names
                for jj in range(len(swathx.bursts)):
                    ii = jj - burstoffsetx
                    #burstFileName = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), os.path.basename(swathx.bursts[jj].image.filename))
                    burstFileName = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), 'burst_%02d'%(jj+1) + '.slc')
                    if minBurst <= ii < maxBurst:
                        kk = ii - minBurst
                        burstFileNameNew = os.path.join(os.path.abspath(dirx), 'IW{}'.format(swath), 'burst_%02d'%(kk+1) + '.slc')
                        if burstFileName != burstFileNameNew:
                            img = isceobj.createImage()
                            img.load(burstFileName + '.xml')
                            img.setFilename(burstFileNameNew)
                            #img.extraFilename = burstFileNameNew+'.vrt'
                            img.renderHdr()

                            #still use original vrt
                            os.remove(burstFileName+'.xml')
                            os.remove(burstFileNameNew+'.vrt')
                            os.rename(burstFileName+'.vrt', burstFileNameNew+'.vrt')
                        #change geometry file names accordingly if provided
                        if dirx == referenceDir:
                            if inps.geom_reference is not None:
                                for fileType in ['hgt', 'incLocal', 'lat', 'lon', 'los', 'shadowMask']:
                                    geomFileName = os.path.join(os.path.abspath(inps.geom_reference), 'IW{}'.format(swath), fileType + '_%02d'%(jj+1) + '.rdr')
                                    geomFileNameNew = os.path.join(os.path.abspath(inps.geom_reference), 'IW{}'.format(swath), fileType + '_%02d'%(kk+1) + '.rdr')
                                    if geomFileName != geomFileNameNew:
                                        renameFile(geomFileName, geomFileNameNew)

                #change swath properties
                swathx.bursts = swathTmp.bursts
                swathx.numberOfBursts = numBurst

                #remove original and write new
                os.remove(os.path.join(dirx, 'IW{}.xml'.format(swath)))
                ut.saveProduct(swathx, os.path.join(dirx, 'IW{}.xml'.format(swath)))

    #remove invalid swaths
    invalidSwath3 = list(sorted(set(invalidSwath+invalidSwath2)))
    for swath in invalidSwath3:
        for dirx in acquistionDir:
            iwdir = os.path.join(dirx, 'IW{}'.format(swath))
            iwxml = os.path.join(dirx, 'IW{}.xml'.format(swath))
            if os.path.isdir(iwdir):
                shutil.rmtree(iwdir)
            if os.path.isfile(iwxml):
                os.remove(iwxml)


if __name__ == '__main__':
    '''
    Main driver.
    '''
    # Main Driver
    main()


@ -0,0 +1,95 @@
#!/usr/bin/env python3

# Author: Cunren Liang
# Copyright 2021

import os
import copy
import argparse
import numpy as np

import isce
import isceobj
from isceobj.TopsProc.runIon import cal_coherence
from isceobj.TopsProc.runIon import multilook


def createParser():
    parser = argparse.ArgumentParser(description='compute coherence using only differential interferograms')
    parser.add_argument('-l', '--lower', dest='lower', type=str, required=True,
            help='lower band interferogram')
    parser.add_argument('-u', '--upper', dest='upper', type=str, required=True,
            help='upper band interferogram')
    parser.add_argument('-c', '--coherence', dest='coherence', type=str, required=True,
            help='output coherence')
    parser.add_argument('-r', '--nrlks', dest='nrlks', type=int, default=1,
            help='number of range looks. Default: 1')
    parser.add_argument('-a', '--nalks', dest='nalks', type=int, default=1,
            help='number of azimuth looks. Default: 1')

    return parser


def cmdLineParse(iargs = None):
    parser = createParser()
    return parser.parse_args(args=iargs)


def main(iargs=None):
    '''
    compute coherence using the differential of the lower and upper band interferograms
    '''
    inps = cmdLineParse(iargs)

    os.makedirs(os.path.dirname(inps.coherence), exist_ok=True)

    #The original coherence calculated by topsApp.py is not good at all, use the following coherence instead
    #lowerintfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, self._insar.mergedIfgname)
    #upperintfile = os.path.join(ionParam.ionDirname, ionParam.upperDirname, ionParam.mergedDirname, self._insar.mergedIfgname)
    #corfile = os.path.join(ionParam.ionDirname, ionParam.lowerDirname, ionParam.mergedDirname, self._insar.correlationFilename)

    img = isceobj.createImage()
    img.load(inps.lower + '.xml')
    width = img.width
    length = img.length
    lowerint = np.fromfile(inps.lower, dtype=np.complex64).reshape(length, width)
    upperint = np.fromfile(inps.upper, dtype=np.complex64).reshape(length, width)

    if (inps.nrlks != 1) or (inps.nalks != 1):
        #use the builtin int instead of the np.int alias removed in recent numpy
        width = int(width/inps.nrlks)
        length = int(length/inps.nalks)
        lowerint = multilook(lowerint, inps.nalks, inps.nrlks)
        upperint = multilook(upperint, inps.nalks, inps.nrlks)

    #compute coherence using only the interferograms;
    #here I use the differential interferogram of the lower and upper band interferograms
    #so that coherence is not affected by fringes
    cord = cal_coherence(lowerint*np.conjugate(upperint), win=3, edge=4)
    cor = np.zeros((length*2, width), dtype=np.float32)
    cor[0:length*2:2, :] = np.sqrt( (np.absolute(lowerint)+np.absolute(upperint))/2.0 )
    cor[1:length*2:2, :] = cord
    cor.astype(np.float32).tofile(inps.coherence)
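    #note (added for clarity): this writes a 2-band band-interleaved-by-line float32 file,
    #band 1 the mean amplitude and band 2 the coherence, matching the amplitude/value
    #interleaving used for the .unw/.cor files elsewhere in this workflow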

    #create xml and vrt
    #img.scheme = 'BIL'
    #img.bands = 2
    #img.filename = corfile
    #img.renderHdr()

    #img = isceobj.Image.createUnwImage()
    img = isceobj.createOffsetImage()
    img.setFilename(inps.coherence)
    img.extraFilename = inps.coherence + '.vrt'
    img.setWidth(width)
    img.setLength(length)
    img.renderHdr()


if __name__ == '__main__':
    '''
    Main driver.
    '''
    # Main Driver
    main()


@ -0,0 +1,135 @@
#!/usr/bin/env python3

# Author: Cunren Liang
# Copyright 2021

import os
import copy
import argparse
import numpy as np

import isce
import isceobj
from isceobj.Constants import SPEED_OF_LIGHT
from isceobj.TopsProc.runIon import computeIonosphere
from isceobj.Alos2Proc.runIonFilt import reformatMaskedAreas

from Stack import ionParam


def createParser():
    parser = argparse.ArgumentParser(description='compute ionosphere using lower and upper band interferograms')
    parser.add_argument('-l', '--lower', dest='lower', type=str, required=True,
            help='lower band interferogram')
    parser.add_argument('-u', '--upper', dest='upper', type=str, required=True,
            help='upper band interferogram')
    parser.add_argument('-c', '--coherence', dest='coherence', type=str, required=True,
            help='input coherence')
    parser.add_argument('-i', '--ionosphere', dest='ionosphere', type=str, required=True,
            help='output ionosphere')
    parser.add_argument('-o', '--coherence_output', dest='coherence_output', type=str, required=True,
            help='output coherence file name. simply copies the input coherence')
    parser.add_argument('-m', '--masked_areas', dest='masked_areas', type=int, nargs='+', action='append', default=None,
            help='This is a 2-d list. Each element in the 2-d list is a four-element list: [firstLine, lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the four elements is specified as -1, the program will use firstLine/lastLine/firstColumn/lastColumn instead. e.g. two areas masked out: --masked_areas 10 20 10 20 --masked_areas 110 120 110 120')
    #parser.add_argument('-m', '--masked_areas', dest='masked_areas', type=int, nargs='+', default=None,
    #        help='This is a 2-d list. Each element in the 2-d list is a four-element list: [firstLine, lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the four elements is specified as -1, the program will use firstLine/lastLine/firstColumn/lastColumn instead. e.g. two areas masked out: --masked_areas 10 20 10 20 110 120 110 120')

    return parser


def cmdLineParse(iargs = None):
    parser = createParser()
    return parser.parse_args(args=iargs)


def main(iargs=None):
    '''
    compute ionosphere using lower and upper band interferograms
    '''
    inps = cmdLineParse(iargs)

    # #convert 1-d list to 2-d list
    # if len(inps.masked_areas) % 4 != 0:
    #     raise Exception('each masked area must have four elements')
    # else:
    #     masked_areas = []
    #     n = np.int32(len(inps.masked_areas)/4)
    #     for i in range(n):
    #         masked_areas.append([inps.masked_areas[i*4+0], inps.masked_areas[i*4+1], inps.masked_areas[i*4+2], inps.masked_areas[i*4+3]])
    #     inps.masked_areas = masked_areas

    ###################################
    #SET PARAMETERS HERE
    #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self)
    corThresholdAdj = 0.85
    ###################################

    print('computing ionosphere')
    #get files
    lowerUnwfile = inps.lower
    upperUnwfile = inps.upper
    corfile = inps.coherence

    #use image size from the lower unwrapped interferogram
    img = isceobj.createImage()
    img.load(lowerUnwfile + '.xml')
    width = img.width
    length = img.length

    lowerUnw = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    upperUnw = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    #lowerAmp = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]
    #upperAmp = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]
    cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    #amp = np.sqrt(lowerAmp**2+upperAmp**2)
    amp = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]

    #mask out user-specified areas
    if inps.masked_areas is not None:
        maskedAreas = reformatMaskedAreas(inps.masked_areas, length, width)
        for area in maskedAreas:
            lowerUnw[area[0]:area[1], area[2]:area[3]] = 0
            upperUnw[area[0]:area[1], area[2]:area[3]] = 0
            cor[area[0]:area[1], area[2]:area[3]] = 0

    ionParamObj = ionParam()
    ionParamObj.configure()

    #compute ionosphere
    fl = SPEED_OF_LIGHT / ionParamObj.radarWavelengthLower
    fu = SPEED_OF_LIGHT / ionParamObj.radarWavelengthUpper
    adjFlag = 1
    ionos = computeIonosphere(lowerUnw, upperUnw, cor, fl, fu, adjFlag, corThresholdAdj, 0)
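    #for reference (hedged summary of the standard split-spectrum relation; see the
    #runIon module for the authoritative implementation): the dispersive phase at the
    #original center frequency f0 is fl*fu*(fu*lowerUnw - fl*upperUnw)/(f0*(fu**2 - fl**2));
    #computeIonosphere additionally uses coherence (adjFlag, corThresholdAdj) to adjust
    #2*pi ambiguities between the subband unwrapped phases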

    #dump ionosphere
    outFilename = inps.ionosphere
    os.makedirs(os.path.dirname(inps.ionosphere), exist_ok=True)

    ion = np.zeros((length*2, width), dtype=np.float32)
    ion[0:length*2:2, :] = amp
    ion[1:length*2:2, :] = ionos
    ion.astype(np.float32).tofile(outFilename)
    img.filename = outFilename
    img.extraFilename = outFilename + '.vrt'
    img.renderHdr()

    #dump coherence
    outFilename = inps.coherence_output
    os.makedirs(os.path.dirname(inps.coherence_output), exist_ok=True)

    ion[1:length*2:2, :] = cor
    ion.astype(np.float32).tofile(outFilename)
    img.filename = outFilename
    img.extraFilename = outFilename + '.vrt'
    img.renderHdr()


if __name__ == '__main__':
    '''
    Main driver.
    '''
    # Main Driver
    main()


@ -0,0 +1,138 @@
#!/usr/bin/env python3

# Author: Cunren Liang
# Copyright 2021

import os
import copy
import glob
import shutil
import argparse
import numpy as np

import isce
import isceobj
from isceobj.TopsProc.runIon import adaptive_gaussian
from isceobj.TopsProc.runIon import weight_fitting


def createParser():
    parser = argparse.ArgumentParser(description='filtering ionosphere')
    parser.add_argument('-i', '--input', dest='input', type=str, required=True,
            help='input ionosphere')
    parser.add_argument('-c', '--coherence', dest='coherence', type=str, required=True,
            help='coherence')
    parser.add_argument('-o', '--output', dest='output', type=str, required=True,
            help='output ionosphere')
    parser.add_argument('-a', '--win_min', dest='win_min', type=int, default=100,
            help='minimum filtering window size')
    parser.add_argument('-b', '--win_max', dest='win_max', type=int, default=200,
            help='maximum filtering window size')
    #parser.add_argument('-m', '--masked_areas', dest='masked_areas', type=int, nargs='+', action='append', default=None,
    #        help='This is a 2-d list. Each element in the 2-d list is a four-element list: [firstLine, lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the four elements is specified as -1, the program will use firstLine/lastLine/firstColumn/lastColumn instead. e.g. two areas masked out: --masked_areas 10 20 10 20 --masked_areas 110 120 110 120')

    return parser


def cmdLineParse(iargs = None):
    parser = createParser()
    return parser.parse_args(args=iargs)


def main(iargs=None):
    '''
    filter the ionosphere estimate with an adaptive gaussian filter.
    '''
    inps = cmdLineParse(iargs)

    '''
    This function filters the image using a gaussian filter.

    we projected the ionosphere value onto the ionospheric layer, and the indexes are integers.
    this reduces the number of samples used in filtering.
    a better method is to project the indexes onto the ionospheric layer. This way we have the
    original number of samples used in filtering, but it requires a more complicated filtering
    operation and is currently not implemented.
    a less accurate method is to use the ionosphere without any projection.
    '''

    #################################################
    #SET PARAMETERS HERE
    #if applying polynomial fitting
    #False: no fitting, True: with fitting
    fit = True
    #gaussian filtering window size
    size_max = inps.win_max
    size_min = inps.win_min

    #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self)
    corThresholdIon = 0.85
    #################################################

    print('filtering ionosphere')
    #I find it's better to use the ionosphere that is not projected; it's mostly slowly changing anyway.
    #this should also be better for operational use.
    ionfile = inps.input
    #since I decided to use the ionosphere that is not projected, I should also use coherence that is not projected.
    corfile = inps.coherence

    #use ionosphere and coherence that are projected.
    #ionfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionRaw)
    #corfile = os.path.join(ionParam.ionDirname, ionParam.ioncalDirname, ionParam.ionCor)

    outfile = inps.output

    img = isceobj.createImage()
    img.load(ionfile + '.xml')
    width = img.width
    length = img.length
    ion = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]

    ########################################################################################
    #AFTER COHERENCE IS RESAMPLED AT grd2ion, THERE ARE SOME WEIRD VALUES
    cor[np.nonzero(cor<0)] = 0.0
    cor[np.nonzero(cor>1)] = 0.0
    ########################################################################################

    ion_fit = weight_fitting(ion, cor, width, length, 1, 1, 1, 1, 2, corThresholdIon)

    #no fitting
    if fit == False:
        ion_fit *= 0

    ion -= ion_fit * (ion!=0)

    #minimize the effect of low coherence pixels
    #cor[np.nonzero( (cor<0.85)*(cor!=0) )] = 0.00001
    #filt = adaptive_gaussian(ion, cor, size_max, size_min)
    #cor**14 should be a good weight to use. 22-APR-2018
    filt = adaptive_gaussian(ion, cor**14, size_max, size_min)

    filt += ion_fit * (filt!=0)

    #do not mask now as there is interpolation after least squares estimation of each date's ionosphere
    #filt *= (amp!=0)

    ion = np.zeros((length*2, width), dtype=np.float32)
    ion[0:length*2:2, :] = amp
    ion[1:length*2:2, :] = filt
    ion.astype(np.float32).tofile(outfile)
    img.filename = outfile
    img.extraFilename = outfile + '.vrt'
    img.renderHdr()


if __name__ == '__main__':
    '''
    Main driver.
    '''
    # Main Driver
    main()


@ -22,9 +22,15 @@ def createParser():
    parser.add_argument('-m', '--reference', dest='reference', type=str, required=True,
            help='Directory with reference acquisition')

    parser.add_argument('-x', '--reference_suffix', dest='reference_suffix', type=str, default=None,
            help='reference burst file name suffix')

    parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True,
            help='Directory with secondary acquisition')

    parser.add_argument('-y', '--secondary_suffix', dest='secondary_suffix', type=str, default=None,
            help='secondary burst file name suffix')

    parser.add_argument('-f', '--flatten', dest='flatten', action='store_true', default=False,
            help='Flatten the interferograms with offsets if needed')

@ -177,6 +183,11 @@ def main(iargs=None):
            referencename = reference.image.filename
            secondaryname = secondary.image.filename

            if inps.reference_suffix is not None:
                referencename = os.path.splitext(referencename)[0] + inps.reference_suffix + os.path.splitext(referencename)[1]
            if inps.secondary_suffix is not None:
                secondaryname = os.path.splitext(secondaryname)[0] + inps.secondary_suffix + os.path.splitext(secondaryname)[1]

            if inps.overlap:
                rdict = { 'rangeOff1' : os.path.join(inps.reference, 'overlap', IWstr, 'range_top_%02d_%02d.off'%(ii,ii+1)),
                          'rangeOff2' : os.path.join(inps.secondary, 'overlap', IWstr, 'range_top_%02d_%02d.off'%(ii,ii+1)),

@ -0,0 +1,358 @@
#!/usr/bin/env python3

#
# Author: Cunren Liang
# Copyright 2021
#

import os
import glob
import shutil
import datetime
import numpy as np
#interp1d is used by interp_2d below; this import was missing from the original header
from scipy.interpolate import interp1d

import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml


def datesFromPairs(pairs):
    '''get all dates from pairs
    '''
    dates = []
    for p in pairs:
        for x in p.split('_'):
            if x not in dates:
                dates.append(x)

    dates.sort()

    return dates


def least_sqares(H, S, W=None):
    '''
    #This can make use of multiple threads (set environment variable: OMP_NUM_THREADS)
    linear equations: H theta = S
    W: weight matrix
    '''

    #assign the reshaped array; reshape does not modify S in place
    S = S.reshape(H.shape[0], 1)
    if W is None:
        #use np.dot instead since some old python versions don't have matmul
        m1 = np.linalg.inv(np.dot(H.transpose(), H))
        Z = np.dot( np.dot(m1, H.transpose()) , S)
    else:
        #use np.dot instead since some old python versions don't have matmul
        m1 = np.linalg.inv(np.dot(np.dot(H.transpose(), W), H))
        Z = np.dot(np.dot(np.dot(m1, H.transpose()), W), S)

    return Z.reshape(Z.size)
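
#minimal usage sketch for least_sqares above (illustration only, not part of the original script):
#H = np.array([[1.0, 0.0], [1.0, 1.0], [0.0, 1.0]])
#S = np.array([1.0, 2.0, 1.1])
#least_sqares(H, S)   #-> approximately [0.967, 1.067]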
|
||||
|
||||
|
||||
def interp_2d(data1, numberRangeLooks1, numberRangeLooks2, numberAzimuthLooks1, numberAzimuthLooks2, width2=None, length2=None):
|
||||
'''
|
||||
interpolate data1 of numberRangeLooks1/numberAzimuthLooks1 to data2 of numberRangeLooks2/numberAzimuthLooks2
|
||||
'''
|
||||
length1, width1 = data1.shape
|
||||
|
||||
if width2 is None:
|
||||
width2 = int(np.around(width1*numberRangeLooks1/numberRangeLooks2))
|
||||
if length2 is None:
|
||||
length2 = int(np.around(length1*numberAzimuthLooks1/numberAzimuthLooks2))
|
||||
|
||||
|
||||
#number of range looks input
|
||||
nrli = numberRangeLooks1
|
||||
#number of range looks output
|
||||
nrlo = numberRangeLooks2
|
||||
#number of azimuth looks input
|
||||
nali = numberAzimuthLooks1
|
||||
#number of azimuth looks output
|
||||
nalo = numberAzimuthLooks2
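    #index mapping used below (a sketch of the derivation): output pixel i
    #taken at nrlo looks has its center at raw sample i*nrlo + (nrlo-1)/2;
    #expressed in units of the input looks nrli this is
    #i*nrlo/nrli + (nrlo-nrli)/(2*nrli), which is exactly the scaling and
    #offset applied to index2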
    index1 = np.linspace(0, width1-1, num=width1, endpoint=True)
    index2 = np.linspace(0, width2-1, num=width2, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli)
    data2 = np.zeros((length2, width2), dtype=data1.dtype)
    for i in range(length1):
        f = interp1d(index1, data1[i,:], kind='cubic', fill_value="extrapolate")
        data2[i, :] = f(index2)

    index1 = np.linspace(0, length1-1, num=length1, endpoint=True)
    index2 = np.linspace(0, length2-1, num=length2, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali)
    for j in range(width2):
        f = interp1d(index1, data2[0:length1, j], kind='cubic', fill_value="extrapolate")
        data2[:, j] = f(index2)

    return data2


def cmdLineParse():
    '''
    command line parser.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='least squares estimation')
    parser.add_argument('--idir', dest='idir', type=str, required=True,
            help = 'input directory where each pair (YYYYMMDD_YYYYMMDD) is located. only folders are recognized')
    parser.add_argument('--odir', dest='odir', type=str, required=True,
            help = 'output directory for estimated result of each date')
    parser.add_argument('--zro_date', dest='zro_date', type=str, default=None,
            help = 'date in least squares estimation whose ionospheric phase is assumed to be zero. format: YYYYMMDD. default: first date')

    parser.add_argument('--exc_date', dest='exc_date', type=str, nargs='+', default=[],
            help = 'pairs involving these dates are excluded in least squares estimation. a number of dates separated by blanks. format: YYYYMMDD YYYYMMDD YYYYMMDD...')
    parser.add_argument('--exc_pair', dest='exc_pair', type=str, nargs='+', default=[],
            help = 'pairs excluded in least squares estimation. a number of pairs separated by blanks. format: YYYYMMDD-YYYYMMDD YYYYMMDD-YYYYMMDD...')
    parser.add_argument('--tsmax', dest='tsmax', type=float, default=None,
            help = 'maximum time span in years of pairs used in least squares estimation. default: None')

    parser.add_argument('--nrlks1', dest='nrlks1', type=int, default=1,
            help = 'number of range looks of input. default: 1')
    parser.add_argument('--nalks1', dest='nalks1', type=int, default=1,
            help = 'number of azimuth looks of input. default: 1')
    parser.add_argument('--nrlks2', dest='nrlks2', type=int, default=1,
            help = 'number of range looks of output. default: 1')
    parser.add_argument('--nalks2', dest='nalks2', type=int, default=1,
            help = 'number of azimuth looks of output. default: 1')
    parser.add_argument('--width2', dest='width2', type=int, default=None,
            help = 'width of output result. default: None, determined by program')
    parser.add_argument('--length2', dest='length2', type=int, default=None,
            help = 'length of output result. default: None, determined by program')
    parser.add_argument('--merged_geom', dest='merged_geom', type=str, default=None,
            help = 'a merged geometry file for getting width2/length2, e.g. merged/geom_reference/hgt.rdr. if provided, --width2/--length2 will be overwritten')

    parser.add_argument('--interp', dest='interp', action='store_true', default=False,
            help='interpolate estimated result to nrlks2/nalks2 sample size')
    parser.add_argument('--msk_overlap', dest='msk_overlap', action='store_true', default=False,
            help='mask output with overlap of all acquisitions')


    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()

if __name__ == '__main__':

    inps = cmdLineParse()


    #get user parameters from input
    idir = inps.idir
    odir = inps.odir
    dateZero = inps.zro_date
    dateExcluded = inps.exc_date
    pairExcluded = inps.exc_pair
    tsmax = inps.tsmax
    numberRangeLooks1 = inps.nrlks1
    numberAzimuthLooks1 = inps.nalks1
    numberRangeLooks2 = inps.nrlks2
    numberAzimuthLooks2 = inps.nalks2
    width2 = inps.width2
    length2 = inps.length2
    mergedGeom = inps.merged_geom
    interp = inps.interp
    maskOverlap = inps.msk_overlap
    #######################################################

    #all pair folders in order
    pairDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*_*')))
    pairDirs = [x for x in pairDirs if os.path.isdir(x)]

    #all pairs in order
    pairsAll = [os.path.basename(x) for x in pairDirs]
    #all dates in order
    datesAll = datesFromPairs(pairsAll)

    #select pairs
    pairs = []
    for x in pairsAll:
        dateReference, dateSecondary = x.split('_')
        timeReference = datetime.datetime.strptime(dateReference, "%Y%m%d")
        timeSecondary = datetime.datetime.strptime(dateSecondary, "%Y%m%d")
        ts = np.absolute((timeSecondary - timeReference).total_seconds()) / (365.0 * 24.0 * 3600)
        #exclude pairs involving any excluded date (see --exc_date help)
        if (dateReference in dateExcluded) or (dateSecondary in dateExcluded):
            continue
        if (x in pairExcluded):
            continue
        if tsmax is not None:
            if ts > tsmax:
                continue
        pairs.append(x)

    dates = datesFromPairs(pairs)
    if dateZero is not None:
        if dateZero not in dates:
            raise Exception('zro_date provided by user not in the dates involved in least squares estimation.')
    else:
        dateZero = dates[0]

    print('all pairs:\n{}'.format(' '.join(pairsAll)))
    print('all dates:\n{}'.format(' '.join(datesAll)))
    print('used pairs:\n{}'.format(' '.join(pairs)))
    print('used dates:\n{}'.format(' '.join(dates)))


    ####################################################################################
    print('\nSTEP 1. read files')
    ####################################################################################

    ndate = len(dates)
    npair = len(pairs)

    ionfile = os.path.join(idir, pairs[0], 'ion_cal', 'filt.ion')

    img = isceobj.createImage()
    img.load(ionfile+'.xml')
    width = img.width
    length = img.length

    ionPairs = np.zeros((npair, length, width), dtype=np.float32)
    flag = np.ones((length, width), dtype=np.float32)

    #this is reserved for future use (weighted least squares)
    wls = False
    stdPairs = np.ones((npair, length, width), dtype=np.float32)
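    #filt.ion is a 2-band line-interleaved (BIL) float32 file: even lines hold
    #amplitude and odd lines hold ionospheric phase, hence the strided slicing below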
    for i in range(npair):
        ionfile = os.path.join(idir, pairs[i], 'ion_cal', 'filt.ion')
        ionPairs[i, :, :] = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
        #flag of valid/invalid is defined by the amplitude image
        amp = (np.fromfile(ionfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]
        flag *= (amp!=0)


    ####################################################################################
    print('\nSTEP 2. do least squares')
    ####################################################################################
    import copy
    from numpy.linalg import matrix_rank
    dates2 = copy.deepcopy(dates)
    dates2.remove(dateZero)

    #observation matrix
    H0 = np.zeros((npair, ndate-1))
    for k in range(npair):
        dateReference = pairs[k].split('_')[0]
        dateSecondary = pairs[k].split('_')[1]
        if dateReference != dateZero:
            dateReference_i = dates2.index(dateReference)
            H0[k, dateReference_i] = 1
        if dateSecondary != dateZero:
            dateSecondary_i = dates2.index(dateSecondary)
            H0[k, dateSecondary_i] = -1
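    #worked example (hypothetical dates): with dates 20200101 (dateZero),
    #20200113, 20200125 and pairs 20200101_20200113, 20200113_20200125,
    #dates2 is [20200113, 20200125] and
    #H0 = [[-1,  0],
    #      [ 1, -1]]
    #i.e. each row models the pair phase as ion(reference) - ion(secondary)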

    rank = matrix_rank(H0)
    if rank < ndate-1:
        raise Exception('dates to be estimated are not fully connected by the pairs used in least squares')
    else:
        print('number of pairs to be used in least squares: {}'.format(npair))
        print('number of dates to be estimated: {}'.format(ndate-1))
        print('observation matrix rank: {}'.format(rank))

    ts = np.zeros((ndate-1, length, width), dtype=np.float32)
    for i in range(length):
        if (i+1) % 50 == 0 or (i+1) == length:
            print('processing line: %6d of %6d' % (i+1, length), end='\r')
        if (i+1) == length:
            print()
        for j in range(width):

            #observed signal
            S0 = ionPairs[:, i, j]

            if wls == False:
                #observed signal
                S = S0
                H = H0
            else:
                #add weight
                #https://stackoverflow.com/questions/19624997/understanding-scipys-least-square-function-with-irls
                #https://stackoverflow.com/questions/27128688/how-to-use-least-squares-with-weight-matrix-in-python
                wgt = (stdPairs[:, i, j])**2
                W = np.sqrt(1.0/wgt)
                H = H0 * W[:, None]
                S = S0 * W

            #do least-squares estimation
            #[theta, residuals, rank, singular] = np.linalg.lstsq(H, S)
            #make W a full matrix if using W here (which is a slower method)
            #'using W before this' is faster
            theta = least_squares(H, S, W=None)
            ts[:, i, j] = theta

    # #dump raw estimate
    # cdir = os.getcwd()
    # os.makedirs(odir, exist_ok=True)
    # os.chdir(odir)

    # for i in range(ndate-1):
    #     file_name = 'filt_ion_'+dates2[i]+ml2+'.ion'
    #     ts[i, :, :].astype(np.float32).tofile(file_name)
    #     create_xml(file_name, width, length, 'float')
    # file_name = 'filt_ion_'+dateZero+ml2+'.ion'
    # (np.zeros((length, width), dtype=np.float32)).astype(np.float32).tofile(file_name)
    # create_xml(file_name, width, length, 'float')

    # os.chdir(cdir)


    ####################################################################################
    print('\nSTEP 3. interpolate ionospheric phase')
    ####################################################################################

    width1 = width
    length1 = length

    if width2 is None:
        width2 = int(width1 * numberRangeLooks1 / numberRangeLooks2)
    if length2 is None:
        length2 = int(length1 * numberAzimuthLooks1 / numberAzimuthLooks2)
    if mergedGeom is not None:
        from osgeo import gdal
        ds = gdal.Open(mergedGeom + ".vrt", gdal.GA_ReadOnly)
        width2 = ds.RasterXSize
        length2 = ds.RasterYSize

    os.makedirs(odir, exist_ok=True)
    for idate in range(ndate-1):
        print('interpolate {}'.format(dates2[idate]))

        ionrectfile = os.path.join(odir, dates2[idate]+'.ion')
        if interp and ((numberRangeLooks1 != numberRangeLooks2) or (numberAzimuthLooks1 != numberAzimuthLooks2)):
            ionrect = interp_2d(ts[idate, :, :], numberRangeLooks1, numberRangeLooks2, numberAzimuthLooks1, numberAzimuthLooks2,
                                width2=width2, length2=length2)

            #mask with overlap of all acquisitions
            if maskOverlap:
                if idate == 0:
                    flagrect = interp_2d(flag, numberRangeLooks1, numberRangeLooks2, numberAzimuthLooks1, numberAzimuthLooks2,
                                         width2=width2, length2=length2)
                ionrect *= (flagrect>0.5)

            ionrect.astype(np.float32).tofile(ionrectfile)
            create_xml(ionrectfile, width2, length2, 'float')
        else:
            ionrect = ts[idate, :, :]

            if maskOverlap:
                ionrect *= flag

            ionrect.astype(np.float32).tofile(ionrectfile)
            create_xml(ionrectfile, width1, length1, 'float')

    ionrectfile = os.path.join(odir, dateZero+'.ion')
    if interp and ((numberRangeLooks1 != numberRangeLooks2) or (numberAzimuthLooks1 != numberAzimuthLooks2)):
        (np.zeros((length2, width2), dtype=np.float32)).astype(np.float32).tofile(ionrectfile)
        create_xml(ionrectfile, width2, length2, 'float')
    else:
        (np.zeros((length1, width1), dtype=np.float32)).astype(np.float32).tofile(ionrectfile)
        create_xml(ionrectfile, width1, length1, 'float')
@ -0,0 +1,26 @@
###ionospheric correction module parameters
###the values below are the default values used by the module
###remove # to set the parameters

#maximum window size for filtering ionosphere phase: 200
#minimum window size for filtering ionosphere phase: 100
#maximum window size for filtering ionosphere azimuth shift: 150
#minimum window size for filtering ionosphere azimuth shift: 75

###separated islands or areas usually affect ionosphere estimation and it's better to mask them
###out. check ion/date1_date2/ion_cal/raw_no_projection.ion for areas to be masked out.
###The parameter is a 2-D list. Each element in the 2-D list is a four-element list: [firstLine,
###lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the
###four elements is specified as -1, the program will use firstLine/lastLine/firstColumn/
###lastColumn instead. For example, if you want to mask the following two areas out, you can
###specify a 2-D list like:
###[[100, 200, 100, 200],[1000, 1200, 500, 600]]
#areas masked out in ionospheric phase estimation: None

###better NOT to change the following two parameters, since they are related
###to the filtering parameters above
#total number of azimuth looks in the ionosphere processing: 50
#total number of range looks in the ionosphere processing: 200
###the above numbers should be integer multiples of the below numbers
#number of azimuth looks at first stage for ionosphere phase unwrapping: 10
#number of range looks at first stage for ionosphere phase unwrapping: 40
@ -0,0 +1,97 @@
#!/usr/bin/env python3

# Author: Cunren Liang
# Copyright 2021

import os
import copy
import argparse
import numpy as np

import isce
import isceobj
from isceobj.TopsProc.runIon import multilook


def createParser():
    parser = argparse.ArgumentParser(description='multilook unwrapped interferograms')
    parser.add_argument('-u', '--unw', dest='unw', type=str, required=True,
            help='input unwrapped interferogram')
    parser.add_argument('-c', '--cor', dest='cor', type=str, required=True,
            help='input coherence')
    parser.add_argument('-o', '--output', dest='output', type=str, required=True,
            help='output multi-look unwrapped interferogram')
    parser.add_argument('-r', '--nrlks', dest='nrlks', type=int, default=1,
            help='number of range looks. Default: 1')
    parser.add_argument('-a', '--nalks', dest='nalks', type=int, default=1,
            help='number of azimuth looks. Default: 1')

    return parser


def cmdLineParse(iargs = None):
    parser = createParser()
    return parser.parse_args(args=iargs)


def main(iargs=None):
    '''
    '''
    inps = cmdLineParse(iargs)

    nrlks = inps.nrlks
    nalks = inps.nalks

    if (nrlks == 1) and (nalks == 1):
        img = isceobj.createImage()
        img.load(inps.unw + '.xml')
        img.setFilename(inps.output)
        img.extraFilename = inps.output+'.vrt'
        img.renderHdr()

        os.symlink(os.path.abspath(inps.unw), os.path.abspath(inps.output))
    else:
        #use coherence to compute weight
        corName0 = inps.cor
        corimg = isceobj.createImage()
        corimg.load(corName0 + '.xml')
        width = corimg.width
        length = corimg.length
        widthNew = int(width / nrlks)
        lengthNew = int(length / nalks)
        cor0 = (np.fromfile(corName0, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
        wgt = cor0**2
        a = multilook(wgt, nalks, nrlks)
        d = multilook((cor0!=0).astype(np.int32), nalks, nrlks)

        #unwrapped file
        unwrapName0 = inps.unw
        unwimg = isceobj.createImage()
        unwimg.load(unwrapName0 + '.xml')
        unw0 = (np.fromfile(unwrapName0, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
        amp0 = (np.fromfile(unwrapName0, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]
        e = multilook(unw0*wgt, nalks, nrlks)
        f = multilook(amp0**2, nalks, nrlks)
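        #in the output below, phase = e/a is the coherence**2-weighted mean
        #phase and amplitude = sqrt(f/d) is the RMS of the valid single-look
        #amplitudes (d is the valid fraction per multilook window); the
        #(x + (x==0)) terms avoid division by zero in all-invalid windows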
        unw = np.zeros((lengthNew*2, widthNew), dtype=np.float32)
        unw[0:lengthNew*2:2, :] = np.sqrt(f / (d + (d==0)))
        unw[1:lengthNew*2:2, :] = e / (a + (a==0))

        #output file
        if os.path.dirname(inps.output) != '':
            os.makedirs(os.path.dirname(inps.output), exist_ok=True)
        unwrapName = inps.output
        unw.astype(np.float32).tofile(unwrapName)
        unwimg.setFilename(unwrapName)
        unwimg.extraFilename = unwrapName + '.vrt'
        unwimg.setWidth(widthNew)
        unwimg.setLength(lengthNew)
        unwimg.renderHdr()


if __name__ == '__main__':
    '''
    Main driver.
    '''
    main()
@ -360,10 +360,12 @@ def main(iargs=None):
    if inps.isaligned:
        reference = ifg.reference

        # checking inconsistent number of bursts in the secondary acquisitions
        if reference.numberOfBursts != ifg.numberOfBursts:
            raise ValueError('{} has different number of bursts ({}) than the reference ({})'.format(
                inps.reference, ifg.numberOfBursts, reference.numberOfBursts))
        #this does not make sense: the number of bursts in the reference is not necessarily
        #the number of bursts in the interferogram, so comment it out.
        # # checking inconsistent number of bursts in the secondary acquisitions
        # if reference.numberOfBursts != ifg.numberOfBursts:
        #     raise ValueError('{} has different number of bursts ({}) than the reference ({})'.format(
        #         inps.reference, ifg.numberOfBursts, reference.numberOfBursts))

    else:
        reference = ifg
@ -0,0 +1,169 @@
#!/usr/bin/env python3

# Author: Cunren Liang
# Copyright 2021


import os
import glob
import argparse
import numpy as np

import isce
import isceobj

from isceobj.TopsProc.runMergeBursts import mergeBox
from isceobj.TopsProc.runMergeBursts import adjustValidWithLooks
from isceobj.TopsProc.runMergeBursts import mergeBurstsVirtual
from isceobj.TopsProc.runMergeBursts import multilook as multilook2

from Stack import ionParam
import s1a_isce_utils as ut


def createParser():
    '''
    Create command line parser.
    '''

    parser = argparse.ArgumentParser(description='merge bursts for ionosphere estimation')
    parser.add_argument('-i', '--reference', type=str, dest='reference', required=True,
            help='directory with the reference image. will be merged in a box defined by reference')
    parser.add_argument('-s', '--stack', type=str, dest='stack', default = None,
            help='directory with the stack xml files which includes the common valid region of each burst in the stack')
    parser.add_argument('-d', '--dirname', type=str, dest='dirname', required=True,
            help='directory with products to merge')
    parser.add_argument('-n', '--name_pattern', type=str, dest='name_pattern', required=True,
            help = 'a name pattern of burst products that will be merged. e.g.: fine_*.int')
    parser.add_argument('-o', '--outfile', type=str, dest='outfile', required=True,
            help='output merged file')
    parser.add_argument('-r', '--nrlks', type=int, dest='nrlks', default=1,
            help = 'number of range looks')
    parser.add_argument('-a', '--nalks', type=int, dest='nalks', default=1,
            help = 'number of azimuth looks')
    parser.add_argument('-u', '--nrlks0', type=int, dest='nrlks0', default=1,
            help = 'number of range looks 0')
    parser.add_argument('-v', '--nalks0', type=int, dest='nalks0', default=1,
            help = 'number of azimuth looks 0')
    parser.add_argument('-x', '--rvalid', type=int, dest='rvalid', default=None,
            help = 'number of valid samples in a multilook window in range, 1<=rvalid<=nrlks. default: nrlks')
    parser.add_argument('-y', '--avalid', type=int, dest='avalid', default=None,
            help = 'number of valid lines in a multilook window in azimuth, 1<=avalid<=nalks. default: nalks')
    parser.add_argument('-w', '--swath', type=int, dest='swath', default=None,
            help = 'swath to merge, 1 or 2 or 3. default: all swaths')

    return parser


def cmdLineParse(iargs = None):
    '''
    Command line parser.
    '''

    parser = createParser()
    inps = parser.parse_args(args=iargs)

    return inps


def updateValid(frame1, frame2):
    '''
    update frame1 valid with frame2 valid
    '''

    min1 = frame1.bursts[0].burstNumber
    max1 = frame1.bursts[-1].burstNumber

    min2 = frame2.bursts[0].burstNumber
    max2 = frame2.bursts[-1].burstNumber

    minBurst = max(min1, min2)
    maxBurst = min(max1, max2)

    for ii in range(minBurst, maxBurst + 1):
        frame1.bursts[ii-min1].firstValidLine   = frame2.bursts[ii-min2].firstValidLine
        frame1.bursts[ii-min1].firstValidSample = frame2.bursts[ii-min2].firstValidSample
        frame1.bursts[ii-min1].numValidLines    = frame2.bursts[ii-min2].numValidLines
        frame1.bursts[ii-min1].numValidSamples  = frame2.bursts[ii-min2].numValidSamples

    return


def main(iargs=None):
    '''
    merge bursts
    '''
    inps = cmdLineParse(iargs)

    if inps.rvalid is None:
        inps.rvalid = 'strict'
    else:
        if not (1 <= inps.rvalid <= inps.nrlks):
            raise Exception('1<=rvalid<=nrlks')
    if inps.avalid is None:
        inps.avalid = 'strict'
    else:
        if not (1 <= inps.avalid <= inps.nalks):
            raise Exception('1<=avalid<=nalks')

    namePattern = inps.name_pattern.split('*')

    frameReferenceList = []
    frameProductList = []
    burstList = []
    swathList = ut.getSwathList(inps.reference)
    for swath in swathList:
        frameReference = ut.loadProduct(os.path.join(inps.reference, 'IW{0}.xml'.format(swath)))

        minBurst = frameReference.bursts[0].burstNumber
        maxBurst = frameReference.bursts[-1].burstNumber
        if minBurst==maxBurst:
            print('Skipping processing of swath {0}'.format(swath))
            continue

        frameProduct = ut.loadProduct(os.path.join(inps.dirname, 'IW{0}.xml'.format(swath)))
        minBurst = frameProduct.bursts[0].burstNumber
        maxBurst = frameProduct.bursts[-1].burstNumber

        if inps.stack is not None:
            print('Updating the valid region of each burst to the common valid region of the stack')
            frameStack = ut.loadProduct(os.path.join(inps.stack, 'IW{0}.xml'.format(swath)))
            updateValid(frameReference, frameStack)
            updateValid(frameProduct, frameStack)

        frameReferenceList.append(frameReference)

        if inps.swath is not None:
            if swath == inps.swath:
                frameProductList.append(frameProduct)
                burstList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0]+'%02d'%(x)+namePattern[1]) for x in range(minBurst, maxBurst+1)])
        else:
            frameProductList.append(frameProduct)
            burstList.append([os.path.join(inps.dirname, 'IW{0}'.format(swath), namePattern[0]+'%02d'%(x)+namePattern[1]) for x in range(minBurst, maxBurst+1)])

    if os.path.dirname(inps.outfile) != '':
        os.makedirs(os.path.dirname(inps.outfile), exist_ok=True)
    suffix = '.full'
    if (inps.nrlks0 == 1) and (inps.nalks0 == 1):
        suffix = ''
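    #the bursts are first merged at full resolution into <outfile>.full; when
    #nrlks0/nalks0 looks are requested, the multilooked result is then written
    #to <outfile> below, otherwise the merged file itself is the final output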

    box = mergeBox(frameReferenceList)
    #adjust valid with looks, 'frames' ARE CHANGED AFTER RUNNING THIS
    #here numberRangeLooks, instead of numberRangeLooks0, is used, since we need to do next-step multilooking after unwrapping. same for numberAzimuthLooks.
    (burstValidBox, burstValidBox2, message) = adjustValidWithLooks(frameProductList, box, inps.nalks, inps.nrlks, edge=0, avalid=inps.avalid, rvalid=inps.rvalid)
    mergeBurstsVirtual(frameProductList, burstList, box, inps.outfile+suffix)
    if suffix not in ['', None]:
        multilook2(inps.outfile+suffix,
                   outname = inps.outfile,
                   alks = inps.nalks0, rlks=inps.nrlks0)
        #this is never used for ionosphere correction
    else:
        print('Skipping multi-looking ....')


if __name__ == '__main__' :
    '''
    Merge products burst-by-burst.
    '''

    main()
@ -0,0 +1,238 @@
#!/usr/bin/env python3

# Author: Cunren Liang
# Copyright 2021

import os
import copy
import shutil
import argparse
import numpy as np

import isce
import isceobj

from isceobj.TopsProc.runMergeBursts import mergeBox
from isceobj.TopsProc.runMergeBursts import adjustValidWithLooks
from isceobj.TopsProc.runIon import cal_cross_ab_ramp

from Stack import ionParam
import s1a_isce_utils as ut
from mergeBurstsIon import updateValid


def createParser():
    parser = argparse.ArgumentParser(description='merge swath ionosphere')
    parser.add_argument('-c', '--reference', type=str, dest='reference', required=True,
            help='directory with the reference image')
    parser.add_argument('-s', '--stack', type=str, dest='stack', default = None,
            help='directory with the stack xml files which includes the common valid region of the stack')
    parser.add_argument('-i', '--input', dest='input', type=str, required=True,
            help='directory with input swath ionosphere containing swath directories ion_cal_IW*')
    parser.add_argument('-o', '--output', dest='output', type=str, required=True,
            help='directory with output merged ionosphere')
    parser.add_argument('-r', '--nrlks', type=int, dest='nrlks', default=1,
            help = 'number of range looks. NOT number of range looks 0')
    parser.add_argument('-a', '--nalks', type=int, dest='nalks', default=1,
            help = 'number of azimuth looks. NOT number of azimuth looks 0')
    parser.add_argument('-m', '--remove_ramp', type=int, dest='remove_ramp', default=0,
            help = 'remove an empirical ramp as a result of different platforms. 0: no removal (default), 1: S1A-S1B, -1: S1B-S1A')

    return parser


def cmdLineParse(iargs = None):
    parser = createParser()
    return parser.parse_args(args=iargs)


def main(iargs=None):
    '''
    '''
    inps = cmdLineParse(iargs)

    corThresholdSwathAdj = 0.85

    numberRangeLooks = inps.nrlks
    numberAzimuthLooks = inps.nalks
    remove_ramp = inps.remove_ramp

    ionParamObj = ionParam()
    ionParamObj.configure()

    #####################################################################
    framesBox = []
    swathList = sorted(ut.getSwathList(inps.reference))
    for swath in swathList:
        frame = ut.loadProduct(os.path.join(inps.reference, 'IW{0}.xml'.format(swath)))

        minBurst = frame.bursts[0].burstNumber
        maxBurst = frame.bursts[-1].burstNumber
        if minBurst==maxBurst:
            print('Skipping processing of swath {0}'.format(swath))
            continue

        passDirection = frame.bursts[0].passDirection.lower()

        if inps.stack is not None:
            print('Updating the valid region of each burst to the common valid region of the stack')
            frame_stack = ut.loadProduct(os.path.join(inps.stack, 'IW{0}.xml'.format(swath)))
            updateValid(frame, frame_stack)

        framesBox.append(frame)

    box = mergeBox(framesBox)
    #adjust valid with looks, 'frames' ARE CHANGED AFTER RUNNING THIS
    #here numberRangeLooks, instead of numberRangeLooks0, is used, since we need to do next-step multilooking after unwrapping. same for numberAzimuthLooks.
    (burstValidBox, burstValidBox2, message) = adjustValidWithLooks(framesBox, box, numberAzimuthLooks, numberRangeLooks, edge=0, avalid='strict', rvalid='strict')


    #1. we use adjustValidWithLooks() to compute burstValidBox for extracting burst bounding boxes, use each burst's bounding box to retrieve
    #the corresponding burst in the merged swath image, and then put the burst in the final merged image.

    #so there is no need to use the interferogram IW*.xml; the reference IW*.xml is good enough. If there is no corresponding burst in the
    #interferogram IW*.xml, the burst in the merged swath image is just zero, and we can put this zero burst in the final merged image.

    #2. we use mergeBox() to compute box[1] to be used in cal_cross_ab_ramp()

    #####################################################################

    numValidSwaths = len(swathList)

    if numValidSwaths == 1:
        print('there is only one valid swath, simply copy the files')

        os.makedirs(inps.output, exist_ok=True)
        corName = os.path.join(inps.input, 'ion_cal_IW{}'.format(swathList[0]), 'raw_no_projection.cor')
        ionName = os.path.join(inps.input, 'ion_cal_IW{}'.format(swathList[0]), 'raw_no_projection.ion')
        corOutName = os.path.join(inps.output, 'raw_no_projection.cor')
        ionOutName = os.path.join(inps.output, 'raw_no_projection.ion')

        shutil.copy2(corName, corOutName)
        shutil.copy2(ionName, ionOutName)
        #os.symlink(os.path.abspath(corName), os.path.abspath(corOutName))
        #os.symlink(os.path.abspath(ionName), os.path.abspath(ionOutName))

        img = isceobj.createImage()
        img.load(corName + '.xml')
        img.setFilename(corOutName)
        img.extraFilename = corOutName+'.vrt'
        img.renderHdr()

        img = isceobj.createImage()
        img.load(ionName + '.xml')
        img.setFilename(ionOutName)
        img.extraFilename = ionOutName+'.vrt'
        img.renderHdr()

        return

    print('merging swaths')


    corList = []
    ampList = []
    ionosList = []
    for swath in swathList:
        corName = os.path.join(inps.input, 'ion_cal_IW{}'.format(swath), 'raw_no_projection.cor')
        ionName = os.path.join(inps.input, 'ion_cal_IW{}'.format(swath), 'raw_no_projection.ion')

        img = isceobj.createImage()
        img.load(ionName + '.xml')
        width = img.width
        length = img.length

        amp = (np.fromfile(corName, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]
        cor = (np.fromfile(corName, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
        ion = (np.fromfile(ionName, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]

        corList.append(cor)
        ampList.append(amp)
        ionosList.append(ion)

    #do adjustment between adjacent swaths
    if numValidSwaths == 3:
        adjustList = [ionosList[0], ionosList[2]]
    else:
        adjustList = [ionosList[0]]
    for adjdata in adjustList:
        index = np.nonzero((adjdata!=0) * (ionosList[1]!=0) * (corList[1] > corThresholdSwathAdj))
        if index[0].size < 5:
            print('WARNING: too few samples available for adjustment between swaths: {} with coherence threshold: {}'.format(index[0].size, corThresholdSwathAdj))
            print('         no adjustment made')
            print('         to do adjustment, please consider using a lower coherence threshold')
        else:
            print('number of samples available for adjustment in the overlap area: {}'.format(index[0].size))
            #diff = np.mean((ionosList[1] - adjdata)[index], dtype=np.float64)

            #use weighted mean instead
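            #i.e. diff = sum(w_i * d_i) / sum(w_i), with d_i the per-sample
            #difference in the overlap and weights w_i = coherence**14, which
            #strongly favors the most coherent samples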
            wgt = corList[1][index]**14
            diff = np.sum((ionosList[1] - adjdata)[index] * wgt / np.sum(wgt, dtype=np.float64), dtype=np.float64)

            index2 = np.nonzero(adjdata!=0)
            adjdata[index2] = adjdata[index2] + diff

    #get merged ionosphere
    ampMerged = np.zeros((length, width), dtype=np.float32)
    corMerged = np.zeros((length, width), dtype=np.float32)
    ionosMerged = np.zeros((length, width), dtype=np.float32)
    for i in range(numValidSwaths):
        nBurst = len(burstValidBox[i])
        for j in range(nBurst):

            #index after multi-looking in merged image, index starts from 1
            first_line = int(np.around((burstValidBox[i][j][0] - 1) / numberAzimuthLooks + 1))
            last_line = int(np.around(burstValidBox[i][j][1] / numberAzimuthLooks))
            first_sample = int(np.around((burstValidBox[i][j][2] - 1) / numberRangeLooks + 1))
            last_sample = int(np.around(burstValidBox[i][j][3] / numberRangeLooks))

            corMerged[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] = \
                corList[i][first_line-1:last_line-1+1, first_sample-1:last_sample-1+1]

            ampMerged[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] = \
                ampList[i][first_line-1:last_line-1+1, first_sample-1:last_sample-1+1]

            ionosMerged[first_line-1:last_line-1+1, first_sample-1:last_sample-1+1] = \
                ionosList[i][first_line-1:last_line-1+1, first_sample-1:last_sample-1+1]

    #remove an empirical ramp
    if remove_ramp != 0:
        #warningInfo = '{} calculating ionosphere for cross S-1A/B interferogram, an empirical ramp is removed from estimated ionosphere\n'.format(datetime.datetime.now())
        #with open(os.path.join(ionParam.ionDirname, ionParam.warning), 'a') as f:
        #    f.write(warningInfo)

        abramp = cal_cross_ab_ramp(swathList, box[1], numberRangeLooks, passDirection)
        if remove_ramp == -1:
            abramp *= -1.0
        #currently do not apply this
        #ionosMerged -= abramp[None, :]

    #dump ionosphere
    os.makedirs(inps.output, exist_ok=True)
    outFilename = os.path.join(inps.output, ionParamObj.ionRawNoProj)
    ion = np.zeros((length*2, width), dtype=np.float32)
    ion[0:length*2:2, :] = ampMerged
    ion[1:length*2:2, :] = ionosMerged
    ion.astype(np.float32).tofile(outFilename)
    img.filename = outFilename
    img.extraFilename = outFilename + '.vrt'
    img.renderHdr()

    #dump coherence
    outFilename = os.path.join(inps.output, ionParamObj.ionCorNoProj)
    ion[1:length*2:2, :] = corMerged
    ion.astype(np.float32).tofile(outFilename)
    img.filename = outFilename
    img.extraFilename = outFilename + '.vrt'
    img.renderHdr()


if __name__ == '__main__':
    '''
    Main driver.
    '''
    main()
@ -0,0 +1,113 @@
#!/usr/bin/env python3

#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#

import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET

import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd


def cmdLineParse():
    '''
    command line parser.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='check ionospheric correction results')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where the ionosphere of each date (YYYYMMDD.ion) is located. only files *.ion are recognized')
    parser.add_argument('-odir', dest='odir', type=str, required=True,
            help = 'output directory')
    parser.add_argument('-dates', dest='dates', type=str, nargs='+', default=None,
            help = 'a number of dates separated by blanks. format: YYYYMMDD YYYYMMDD YYYYMMDD... This argument has highest priority. When provided, only process these dates')

    # parser.add_argument('-nrlks', dest='nrlks', type=int, default=1,
    #         help = 'number of range looks 1 * number of range looks ion. default: 1')
    # parser.add_argument('-nalks', dest='nalks', type=int, default=1,
    #         help = 'number of azimuth looks 1 * number of azimuth looks ion. default: 1')


    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()


if __name__ == '__main__':

    inps = cmdLineParse()


    #get user parameters from input
    idir = inps.idir
    odir = inps.odir
    datesUser = inps.dates
    #######################################################

    if shutil.which('montage') is None:
        raise Exception('this command requires montage in ImageMagick\n')


    #get date files
    dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*.ion')))
    dateDirs = [os.path.splitext(os.path.basename(x))[0] for x in dateDirs if os.path.isfile(x)]
    if datesUser is not None:
        dates = datesUser
    else:
        dates = dateDirs

    os.makedirs(odir, exist_ok=True)

    img = isceobj.createImage()
    img.load(os.path.join(idir, dates[0] + '.ion.xml'))
    width = img.width
    length = img.length

    widthMax = 600
    if width >= widthMax:
        ratio = widthMax / width
        resize = ' -resize {}%'.format(ratio*100.0)
    else:
        ratio = 1.0
        resize = ''

    for idate in dates:
        ion = os.path.join(idir, idate + '.ion')
        runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(ion, width, odir))
        runCmd("montage -pointsize {} -label '{}' {} -geometry +{} -compress LZW{} {}.tif".format(
            int((ratio*width)/111*9+0.5),
            idate,
            os.path.join(odir, 'out.ppm'),
            int((ratio*width)/111*5+0.5),
            resize,
            os.path.join(odir, idate)))
        runCmd('rm {}'.format(os.path.join(odir, 'out.ppm')))


    #create colorbar
    width_colorbar = 100
    length_colorbar = 20
    colorbar = np.ones((length_colorbar, width_colorbar), dtype=np.float32) * \
               (np.linspace(-np.pi, np.pi, num=width_colorbar, endpoint=True, dtype=np.float32))[None,:]
    colorbar.astype(np.float32).tofile(os.path.join(odir, 'colorbar'))
    runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(os.path.join(odir, 'colorbar'), width_colorbar, odir))
    runCmd('convert {} -compress LZW -resize 100% {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'colorbar_-pi_pi.tiff')))
    runCmd('rm {} {}'.format(
        os.path.join(odir, 'colorbar'),
        os.path.join(odir, 'out.ppm')))
@ -0,0 +1,113 @@
#!/usr/bin/env python3

#
# Author: Cunren Liang
# Copyright 2015-present, NASA-JPL/Caltech
#

import os
import glob
import shutil
import datetime
import numpy as np
import xml.etree.ElementTree as ET

import isce, isceobj
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd


def cmdLineParse():
    '''
    command line parser.
    '''
    import sys
    import argparse

    parser = argparse.ArgumentParser(description='check ionospheric correction results')
    parser.add_argument('-idir', dest='idir', type=str, required=True,
            help = 'input directory where each pair (YYYYMMDD-YYYYMMDD) is located. only folders are recognized')
    parser.add_argument('-odir', dest='odir', type=str, required=True,
            help = 'output directory')
    parser.add_argument('-pairs', dest='pairs', type=str, nargs='+', default=None,
            help = 'a number of pairs separated by blanks. format: YYYYMMDD-YYYYMMDD YYYYMMDD-YYYYMMDD YYYYMMDD-YYYYMMDD... This argument has highest priority. When provided, only process these pairs')

    # parser.add_argument('-nrlks', dest='nrlks', type=int, default=1,
    #         help = 'number of range looks 1 * number of range looks ion. default: 1')
    # parser.add_argument('-nalks', dest='nalks', type=int, default=1,
    #         help = 'number of azimuth looks 1 * number of azimuth looks ion. default: 1')


    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()


if __name__ == '__main__':

    inps = cmdLineParse()


    #get user parameters from input
    idir = inps.idir
    odir = inps.odir
    pairsUser = inps.pairs
    #######################################################

    if shutil.which('montage') is None:
        raise Exception('this command requires montage in ImageMagick\n')


    #get pair folders
    dateDirs = sorted(glob.glob(os.path.join(os.path.abspath(idir), '*_*')))
    dateDirs = [os.path.basename(x) for x in dateDirs if os.path.isdir(x)]
    if pairsUser is not None:
        pairs = pairsUser
    else:
        pairs = dateDirs

    os.makedirs(odir, exist_ok=True)

    img = isceobj.createImage()
    img.load(os.path.join(idir, pairs[0], 'ion_cal', 'filt.ion.xml'))
    width = img.width
    length = img.length

    widthMax = 600
    if width >= widthMax:
        ratio = widthMax / width
        resize = ' -resize {}%'.format(ratio*100.0)
    else:
        ratio = 1.0
        resize = ''

    for ipair in pairs:
        ion = os.path.join(idir, ipair, 'ion_cal', 'filt.ion')
        #filt.ion is a 2-band BIL file; '-rhdr width*4' skips the amplitude record (4-byte floats) of each line
        runCmd('mdx {} -s {} -rhdr {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(ion, width, width*4, odir))
        runCmd("montage -pointsize {} -label '{}' {} -geometry +{} -compress LZW{} {}.tif".format(
            int((ratio*width)/111*9+0.5),
            ipair,
            os.path.join(odir, 'out.ppm'),
            int((ratio*width)/111*5+0.5),
            resize,
            os.path.join(odir, ipair)))
        runCmd('rm {}'.format(os.path.join(odir, 'out.ppm')))


    #create colorbar
    width_colorbar = 100
    length_colorbar = 20
    colorbar = np.ones((length_colorbar, width_colorbar), dtype=np.float32) * \
               (np.linspace(-np.pi, np.pi, num=width_colorbar, endpoint=True, dtype=np.float32))[None,:]
    colorbar.astype(np.float32).tofile(os.path.join(odir, 'colorbar'))
    runCmd('mdx {} -s {} -cmap cmy -wrap 6.283185307179586 -addr -3.141592653589793 -P -workdir {}'.format(os.path.join(odir, 'colorbar'), width_colorbar, odir))
    runCmd('convert {} -compress LZW -resize 100% {}'.format(os.path.join(odir, 'out.ppm'), os.path.join(odir, 'colorbar_-pi_pi.tiff')))
    runCmd('rm {} {}'.format(
        os.path.join(odir, 'colorbar'),
        os.path.join(odir, 'out.ppm')))
@ -0,0 +1,475 @@
#!/usr/bin/env python3

#Cunren Liang, 22-MAR-2018

#this command can run multiple times for a stack
#example command
#../../code/s1_select_ion.py -dir . -sn 34/38.5 -nr 5

import os
import sys
import glob
import shutil
import zipfile
import argparse
import datetime
import numpy as np
import xml.etree.ElementTree as ET


class sentinelSLC(object):
    """
    A class representing the SLCs
    """
    def __init__(self, safe_file, orbit_file=None):
        self.safe_file = safe_file
        self.orbit_file = orbit_file

    def get_datetime(self):
        datefmt = "%Y%m%dT%H%M%S"
        safe = os.path.basename(self.safe_file)
        fields = safe.split('_')
        self.platform = fields[0]
        self.start_date_time = datetime.datetime.strptime(fields[5], datefmt)
        self.stop_date_time = datetime.datetime.strptime(fields[6], datefmt)
        self.date = (datetime.datetime.date(self.start_date_time)).isoformat().replace('-','')

    def get_param(self):
        #speed of light (m/s)
        c = 299792458.0

        #1. processing software version
        zf = zipfile.ZipFile(self.safe_file, 'r')
        manifest = [item for item in zf.namelist() if '.SAFE/manifest.safe' in item][0]
        xmlstr = zf.read(manifest)
        root = ET.fromstring(xmlstr)
        elem = root.find('.//metadataObject[@ID="processing"]')

        #setup namespace
        nsp = "{http://www.esa.int/safe/sentinel-1.0}"
        rdict = elem.find('.//xmlData/' + nsp + 'processing/' + nsp + 'facility').attrib
        self.proc_site = rdict['site'] +', '+ rdict['country']

        rdict = elem.find('.//xmlData/' + nsp + 'processing/' + nsp + 'facility/' + nsp + 'software').attrib
        #ver = rdict['name'] + ' ' + rdict['version']
        self.proc_version = rdict['version']


        #2. start ranges
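        #(slantRangeTime in the annotation is a two-way travel time, hence the
        #*c/2.0 below to convert it to a one-way slant range in meters)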
|
||||
anna = sorted([item for item in zf.namelist() if '.SAFE/annotation/s1' in item])
|
||||
#dual polarization. for the same swath, the slant ranges of two polarizations should be the same.
|
||||
if len(anna) == 6:
|
||||
anna = anna[1:6:2]
|
||||
|
||||
startingRange = []
|
||||
for k in range(3):
|
||||
xmlstr = zf.read(anna[k])
|
||||
root = ET.fromstring(xmlstr)
|
||||
startingRange.append(
|
||||
float(root.find('imageAnnotation/imageInformation/slantRangeTime').text)*c/2.0
|
||||
)
|
||||
|
||||
self.startingRanges = startingRange
|
||||
|
||||
|
||||
#3. snwe
|
||||
map_overlay = [item for item in zf.namelist() if '.SAFE/preview/map-overlay.kml' in item][0]
|
||||
xmlstr = zf.read(map_overlay)
|
||||
xmlstr = xmlstr.decode('utf-8')
|
||||
start = '<coordinates>'
|
||||
end = '</coordinates>'
|
||||
pnts = xmlstr[xmlstr.find(start)+len(start):xmlstr.find(end)].split()
|
||||
|
||||
lats=[]
|
||||
lons=[]
|
||||
for pnt in pnts:
|
||||
lons.append(float(pnt.split(',')[0]))
|
||||
lats.append(float(pnt.split(',')[1]))
|
||||
self.snwe=[min(lats),max(lats),min(lons),max(lons)]
|
||||
|
||||
|
||||
def get_safe_from_group(group):
|
||||
|
||||
safes = []
|
||||
ngroup = len(group)
|
||||
for i in range(ngroup):
|
||||
ngroupi = len(group[i])
|
||||
for j in range(ngroupi):
|
||||
safes.append(group[i][j].safe_file)
|
||||
|
||||
return safes
|
||||
|
||||
|
||||
def print_group(group):
|
||||
'''print group parameters
|
||||
'''
|
||||
print()
|
||||
print('slice no ver IW1 (m) IW2 (m) IW3 (m)')
|
||||
print('+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
|
||||
|
||||
for i in range(len(group)):
|
||||
for j in range(len(group[i])):
|
||||
print_stuff = '%s %3d %s '%(os.path.basename(group[i][j].safe_file), i+1, group[i][j].proc_version)
|
||||
print_stuff += "{} {} {}".format(group[i][j].startingRanges[0], group[i][j].startingRanges[1], group[i][j].startingRanges[2])
|
||||
print(print_stuff)
|
||||
print()
|
||||
|
||||
|
||||
def get_group(dir0):
|
||||
'''
|
||||
this routine group the slices, each group is an acquisition
|
||||
the returned result is a list (acquistions sorted by starting time) containing a number of lists (slices sorted by starting time)
|
||||
'''
|
||||
#sort by starting time
|
||||
zips = sorted(glob.glob(os.path.join(dir0, 'S1*_IW_SLC_*.zip')), key=lambda x: x.split('_')[-5], reverse=False)
|
||||
nzips = len(zips)
|
||||
|
||||
group = []
|
||||
for i in range(nzips):
|
||||
safeObj=sentinelSLC(zips[i])
|
||||
safeObj.get_datetime()
|
||||
safeObj.get_param()
|
||||
|
||||
datefmt = "%Y%m%dT%H%M%S"
|
||||
fields = zips[i].split('_')
|
||||
tbef = datetime.datetime.strptime(fields[-5], datefmt)
|
||||
taft = datetime.datetime.strptime(fields[-4], datefmt)
|
||||
|
||||
if i == 0:
|
||||
#create new group
|
||||
tbef0 = tbef
|
||||
group0 = []
|
||||
|
||||
#S-1A is capable of operating up to 25 min per orbit [21]
|
||||
#Yague-Martinez et al., "Interferometric Processing of Sentinel-1 TOPS Data,"
|
||||
#S1A/B revisit time is 6 days, here we use 1 day to check if from the same orbit
|
||||
if np.absolute((tbef - tbef0).total_seconds()) < 24 * 3600:
|
||||
group0.append(safeObj)
|
||||
else:
|
||||
group.append(group0)
|
||||
#create new group
|
||||
tbef0 = tbef
|
||||
group0 = []
|
||||
group0.append(safeObj)
|
||||
|
||||
if i == nzips - 1:
|
||||
group.append(group0)
|
||||
|
||||
return group
|
||||
|
||||
|
||||
def check_redundancy(group, threshold=1):
|
||||
'''
|
||||
threshold: time difference threshold in seconds between two slices in second.
|
||||
this routine checks, for a slice, if there are multiple ones.
|
||||
'''
|
||||
|
||||
print('\nchecking different copies of same slice')
|
||||
|
||||
safe_removed_indices = []
|
||||
ngroup = len(group)
|
||||
for i in range(ngroup):
|
||||
ngroupi = len(group[i])
|
||||
if ngroupi == 1:
|
||||
continue
|
||||
else:
|
||||
for j in range(ngroupi-1):
|
||||
for k in range(j+1, ngroupi):
|
||||
if np.absolute((group[i][j].start_date_time - group[i][k].start_date_time).total_seconds()) < threshold and \
|
||||
np.absolute((group[i][j].stop_date_time - group[i][k].stop_date_time).total_seconds()) < threshold:
|
||||
|
||||
#determine which one to be removed
|
||||
j_version = False
|
||||
k_version = False
|
||||
for l in range(ngroupi):
|
||||
if l != j and l != k:
|
||||
if group[i][j].proc_version == group[i][l].proc_version:
|
||||
j_version = True
|
||||
if group[i][k].proc_version == group[i][l].proc_version:
|
||||
k_version = True
|
||||
if j_version == k_version:
|
||||
safe_removed_index = [i, j]
|
||||
else:
|
||||
if j_version == False and k_version == True:
|
||||
safe_removed_index = [i, j]
|
||||
else:
|
||||
safe_removed_index = [i, k]
|
||||
safe_removed_indices.append(safe_removed_index)
|
||||
|
||||
print('in acquistion {}, the following two slices are the same:'.format(i+1))
|
||||
if safe_removed_index == [i, j]:
|
||||
print(os.path.basename(group[i][j].safe_file) + ' (not to be used)')
|
||||
print(os.path.basename(group[i][k].safe_file))
|
||||
else:
|
||||
print(os.path.basename(group[i][j].safe_file))
|
||||
print(os.path.basename(group[i][k].safe_file) + ' (not to be used)')
|
||||
|
||||
#remove redundant slices
|
||||
if safe_removed_indices != []:
|
||||
group_new = []
|
||||
for i in range(ngroup):
|
||||
ngroupi = len(group[i])
|
||||
group_new.append([group[i][j] for j in range(ngroupi) if [i,j] not in safe_removed_indices])
|
||||
|
||||
print('slices removed:')
|
||||
for index in safe_removed_indices:
|
||||
print('%s %3d'%(os.path.basename(group[index[0]][index[1]].safe_file), index[0]+1))
|
||||
|
||||
else:
|
||||
group_new = group
|
||||
print('no slices removed')
|
||||
|
||||
return group_new
|
||||
|
||||
|
||||
def check_version(group):
|
||||
print('\nchecking different slice versions of an acquisition')
|
||||
|
||||
acquistion_removed_indices = []
|
||||
ngroup = len(group)
|
||||
for i in range(ngroup):
|
||||
ngroupi = len(group[i])
|
||||
for j in range(ngroupi):
|
||||
if group[i][0].proc_version != group[i][j].proc_version:
|
||||
print('different slice versions found in acquistion {}'.format(i+1))
|
||||
acquistion_removed_indices.append(i)
|
||||
break
|
||||
|
||||
#remove acquistions
|
||||
if acquistion_removed_indices != []:
|
||||
group_new = [group[i] for i in range(ngroup) if i not in acquistion_removed_indices]
|
||||
print('acquistions removed:')
|
||||
for i in acquistion_removed_indices:
|
||||
for j in range(len(group[i])):
|
||||
print('%s %3d'%(os.path.basename(group[i][j].safe_file), i+1))
|
||||
else:
|
||||
group_new = group
|
||||
print('no acquistions removed')
|
||||
|
||||
return group_new
|
||||
|
||||
|
||||
def check_gap(group):
|
||||
print('\nchecking gaps in an acquistion')
|
||||
|
||||
acquistion_removed_indices = []
|
||||
ngroup = len(group)
|
||||
for i in range(ngroup):
|
||||
ngroupi = len(group[i])
|
||||
if ngroupi == 1:
|
||||
continue
|
||||
else:
|
||||
for j in range(0, ngroupi-1):
|
||||
if (group[i][j+1].start_date_time - group[i][j].stop_date_time).total_seconds() > 0:
|
||||
acquistion_removed_indices.append(i)
|
||||
break
|
||||
|
||||
#remove acquistions
|
||||
if acquistion_removed_indices != []:
|
||||
group_new = [group[i] for i in range(ngroup) if i not in acquistion_removed_indices]
|
||||
print('acquistions removed:')
|
||||
for i in acquistion_removed_indices:
|
||||
for j in range(len(group[i])):
|
||||
print('%s %3d'%(os.path.basename(group[i][j].safe_file), i+1))
|
||||
else:
|
||||
group_new = group
|
||||
print('no acquistions removed')
|
||||
|
||||
return group_new
|
||||
|
||||
|
||||
def acquistion_snwe(groupi):
|
||||
'''return snwe of an acquisition consisting a number of slices'''
|
||||
s = min([x.snwe[0] for x in groupi])
|
||||
n = max([x.snwe[1] for x in groupi])
|
||||
w = min([x.snwe[2] for x in groupi])
|
||||
e = max([x.snwe[3] for x in groupi])
|
||||
|
||||
return [s, n, w, e]
|
||||
|
||||
|
||||
def overlap(group):
|
||||
'''return snwe of the overlap of all acquistions'''
|
||||
|
||||
s = max([(acquistion_snwe(x))[0] for x in group])
|
||||
n = min([(acquistion_snwe(x))[1] for x in group])
|
||||
w = max([(acquistion_snwe(x))[2] for x in group])
|
||||
e = min([(acquistion_snwe(x))[3] for x in group])
|
||||
|
||||
if s >= n or w >= e:
|
||||
#raise Exception('no overlap among the acquistions')
|
||||
print('WARNING: there is no overlap among the acquistions, snwe: {}'.format([s, n, w, e]))
|
||||
|
||||
return [s, n, w, e]
|
||||
|
||||
|
||||
def check_aoi(group, s, n):
|
||||
'''
|
||||
check each group to see if it fully covers [s, n], if not remove the acquistion
|
||||
s: south bound
|
||||
n: north bound
|
||||
'''
|
||||
|
||||
print('\nchecking if each acquistion fully covers user specifed south/north bound [{}, {}]'.format(s, n))
|
||||
|
||||
acquistion_removed_indices = []
|
||||
ngroup = len(group)
|
||||
for i in range(ngroup):
|
||||
snwe = acquistion_snwe(group[i])
|
||||
if not (snwe[0] <= s and snwe[1] >= n):
|
||||
acquistion_removed_indices.append(i)
|
||||
|
||||
#remove acquistions
|
||||
if acquistion_removed_indices != []:
|
||||
group_new = [group[i] for i in range(ngroup) if i not in acquistion_removed_indices]
|
||||
print('acquistions removed:')
|
||||
for i in acquistion_removed_indices:
|
||||
for j in range(len(group[i])):
|
||||
print('%s %3d'%(os.path.basename(group[i][j].safe_file), i+1))
|
||||
else:
|
||||
group_new = group
|
||||
print('no acquistions removed')
|
||||
|
||||
return group_new
|
||||
|
||||
|
||||
def check_different_starting_ranges(group):
|
||||
'''
|
||||
checking if there are different starting ranges in each acquistion.
|
||||
'''
|
||||
|
||||
print('\nchecking if there are different starting ranges in each acquistion')
|
||||
|
||||
acquistion_removed_indices = []
|
||||
ngroup = len(group)
|
||||
for i in range(ngroup):
|
||||
ngroupi = len(group[i])
|
||||
for j in range(1, ngroupi):
|
||||
if group[i][0].startingRanges != group[i][j].startingRanges:
|
||||
acquistion_removed_indices.append(i)
|
||||
#print('++++++++++++++{} {}'.format(group[i][0].safe_file, group[i][j].safe_file))
|
||||
break
|
||||
|
||||
#remove acquistions
|
||||
if acquistion_removed_indices != []:
|
||||
group_new = [group[i] for i in range(ngroup) if i not in acquistion_removed_indices]
|
||||
print('acquistions removed:')
|
||||
for i in acquistion_removed_indices:
|
||||
for j in range(len(group[i])):
|
||||
print('%s %3d'%(os.path.basename(group[i][j].safe_file), i+1))
|
||||
else:
|
||||
group_new = group
|
||||
print('no acquistions removed')
|
||||
|
||||
return group_new
|
||||
|
||||
|
||||
def check_small_number_of_acquisitions_with_same_starting_ranges(group, threshold=1):
    '''
    for the same subswath starting ranges,
    if the number of acquisitions < threshold, remove these acquisitions
    '''

    print('\nchecking small numbers of acquisitions with the same starting ranges')

    ngroup = len(group)

    starting_ranges = [x[0].startingRanges for x in group]

    #get unique starting_ranges
    starting_ranges_unique = []
    for i in range(ngroup):
        if starting_ranges[i] not in starting_ranges_unique:
            starting_ranges_unique.append(starting_ranges[i])

    #get the number of acquisitions for each unique starting range
    ngroup_unique = len(starting_ranges_unique)
    starting_ranges_unique_number = [0 for i in range(ngroup_unique)]
    for k in range(ngroup_unique):
        for i in range(ngroup):
            if starting_ranges_unique[k] == starting_ranges[i]:
                starting_ranges_unique_number[k] += 1

    #get starting ranges to be removed (number of acquisitions < threshold)
    starting_ranges_removed = []
    for k in range(ngroup_unique):
        if starting_ranges_unique_number[k] < threshold:
            starting_ranges_removed.append(starting_ranges_unique[k])

    #remove acquisitions
    if starting_ranges_removed != []:
        group_new = [group[i] for i in range(ngroup) if group[i][0].startingRanges not in starting_ranges_removed]

        print('acquisitions removed:')
        for i in range(ngroup):
            if group[i][0].startingRanges in starting_ranges_removed:
                for j in range(len(group[i])):
                    print('%s %3d'%(os.path.basename(group[i][j].safe_file), i+1))

    else:
        group_new = group
        print('no acquisitions removed')

    return group_new

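The unique-value bookkeeping above can be written more compactly with collections.Counter; a behaviorally equivalent sketch (this assumes the startingRanges values are hashable, e.g. strings, which the original loop does not require):

    from collections import Counter

    counts = Counter(x[0].startingRanges for x in group)
    starting_ranges_removed = [r for r, c in counts.items() if c < threshold]
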
def cmdLineParse():
    '''
    Command line parser.
    '''

    parser = argparse.ArgumentParser(description='select Sentinel-1A/B acquisitions good for ionosphere correction. slices that are not used are moved to folder: not_used')
    parser.add_argument('-dir', dest='dir', type=str, required=True,
            help='directory containing the "S1*_IW_SLC_*.zip" files')
    parser.add_argument('-sn', dest='sn', type=str, required=True,
            help='south/north bound of area of interest, format: south/north')
    parser.add_argument('-nr', dest='nr', type=int, default=10,
            help='minimum number of acquisitions for the same starting ranges. default: 10')

    if len(sys.argv) <= 1:
        print('')
        parser.print_help()
        sys.exit(1)
    else:
        return parser.parse_args()

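A typical invocation might look like the following (directory name and bounds are illustrative, and the script file name is assumed to be s1_select_ion.py):

    s1_select_ion.py -dir ./SLC -sn 33.5/34.5 -nr 10
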
if __name__ == '__main__':

    inps = cmdLineParse()
    s, n = [float(x) for x in inps.sn.split('/')]

    #group the slices
    group = get_group(inps.dir)
    safes_all = get_safe_from_group(group)

    #print overlap of group
    #print('overlap among acquisitions: {}'.format(overlap(group)))

    #print group before removing slices/acquisitions
    print_group(group)

    #do checks and remove the slices/acquisitions
    group = check_redundancy(group, threshold=1)
    group = check_version(group)
    group = check_gap(group)
    group = check_aoi(group, s, n)
    group = check_different_starting_ranges(group)
    group = check_small_number_of_acquisitions_with_same_starting_ranges(group, threshold=inps.nr)

    #print group after removing slices/acquisitions
    print_group(group)

    #move slices that are not used to 'not_used'
    safes_used = get_safe_from_group(group)
    not_used_dir = os.path.join(inps.dir, 'not_used')
    os.makedirs(not_used_dir, exist_ok=True)
    for safe in safes_all:
        if safe not in safes_used:
            shutil.move(safe, not_used_dir)

@@ -184,6 +184,13 @@ def createParser():
    parser.add_argument('-rmFilter', '--rmFilter', dest='rmFilter', action='store_true', default=False,
            help='Make an extra unwrap file in which the filtering effect is removed')

    parser.add_argument('--param_ion', dest='param_ion', type=str, default=None,
            help='ionosphere estimation parameter file. if provided, ionosphere estimation will be done.')

    parser.add_argument('--num_connections_ion', dest='num_connections_ion', type=str, default='3',
            help='number of interferograms between each date and subsequent dates for ionosphere estimation (default: %(default)s).')

    return parser

def cmdLineParse(iargs = None):
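With these two options added, an ionosphere-enabled stack setup might be invoked like this (all paths and flag values are illustrative, and the non-ionosphere arguments follow the script's existing interface):

    stackSentinel.py -s ./SLC -d ./DEM/dem.wgs84 -a ./AuxDir -o ./orbits \
        -w ./work -W interferogram --param_ion ./ion_param.txt --num_connections_ion 3
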
@@ -439,6 +446,158 @@ def selectNeighborPairs(dateList, num_connections, updateStack=False): # should
    return pairs


def selectNeighborPairsIonosphere(safe_dict, num_connections):
    '''
    safe_dict: returned by get_dates(inps)
    num_connections: number of subsequent dates to pair up with a date

    This routine first groups the dates: dates with the same starting ranges are put in a group.
    Pairs within the same group are returned in pairs_same_starting_ranges.
    Pairs connecting different groups are returned in pairs_diff_starting_ranges.
    '''

    #get starting ranges
    for date in safe_dict:
        safe_dict[date].get_starting_ranges()

    #get sorted dateList
    dateList = [key for key in safe_dict.keys()]
    dateList.sort()
    ndate = len(dateList)

    #starting ranges sorted by date
    starting_ranges = [safe_dict[date].startingRanges for date in dateList]

    #get unique starting ranges sorted by date
    starting_ranges_unique = []
    for i in range(ndate):
        if starting_ranges[i] not in starting_ranges_unique:
            starting_ranges_unique.append(starting_ranges[i])
    ndate_unique = len(starting_ranges_unique)

    #put dates with the same starting ranges in a list
    #result is a 2-D list, each dimension sorted by date
    starting_ranges_unique_dates = [[] for i in range(ndate_unique)]
    for k in range(ndate_unique):
        for i in range(ndate):
            if starting_ranges_unique[k] == safe_dict[dateList[i]].startingRanges:
                starting_ranges_unique_dates[k].append(dateList[i])
    #print(starting_ranges_unique_dates)

    if num_connections == 'all':
        num_connections = ndate - 1
    else:
        num_connections = int(num_connections)

    #1. form all possible pairs, to be used in 3
    pairs_same_starting_ranges_0 = []
    pairs_diff_starting_ranges_0 = []
    for i in range(ndate-1):
        for j in range(i+1, i+num_connections+1):
            if j >= ndate:
                continue
            same_starting_ranges = False
            for k in range(ndate_unique):
                if dateList[i] in starting_ranges_unique_dates[k] and dateList[j] in starting_ranges_unique_dates[k]:
                    same_starting_ranges = True
                    break
            if same_starting_ranges == True:
                pairs_same_starting_ranges_0.append((dateList[i], dateList[j]))
            else:
                pairs_diff_starting_ranges_0.append((dateList[i], dateList[j]))

    #2. form pairs with the same starting ranges
    pairs_same_starting_ranges = []
    for k in range(ndate_unique):
        ndate_unique_k = len(starting_ranges_unique_dates[k])
        for i in range(ndate_unique_k):
            for j in range(i+1, i+num_connections+1):
                if j >= ndate_unique_k:
                    continue
                pairs_same_starting_ranges.append((starting_ranges_unique_dates[k][i], starting_ranges_unique_dates[k][j]))

    #3. select pairs with different starting ranges formed in 1 to connect the different groups
    pairs_diff_starting_ranges = []
    for k in range(ndate_unique-1):
        cnt = 0
        for pair in pairs_diff_starting_ranges_0:
            if (pair[0] in starting_ranges_unique_dates[k] and pair[1] in starting_ranges_unique_dates[k+1]) or \
               (pair[1] in starting_ranges_unique_dates[k] and pair[0] in starting_ranges_unique_dates[k+1]):
                pairs_diff_starting_ranges.append(pair)
                cnt += 1
            if cnt >= num_connections:
                break

    return pairs_same_starting_ranges, pairs_diff_starting_ranges

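The nearest-neighbor pairing rule used in step 1 is easier to see in isolation; a self-contained sketch with hypothetical dates:

    dates = ['20200101', '20200113', '20200125', '20200206']
    num_connections = 2
    pairs = [(dates[i], dates[j])
             for i in range(len(dates) - 1)
             for j in range(i + 1, min(i + num_connections + 1, len(dates)))]
    print(pairs)
    # [('20200101', '20200113'), ('20200101', '20200125'),
    #  ('20200113', '20200125'), ('20200113', '20200206'),
    #  ('20200125', '20200206')]
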
def excludeExistingPairsIonosphere(pairs_same_starting_ranges, pairs_diff_starting_ranges, work_dir):
    '''
    This routine searches for existing pairs for ionosphere estimation and excludes them from
    pairs_same_starting_ranges and pairs_diff_starting_ranges.
    '''

    if os.path.isdir(os.path.join(work_dir, 'ion')):
        print('previous ionosphere estimation directory found')
        print('exclude already processed pairs for ionosphere estimation')

        pairs = [os.path.basename(p) for p in glob.glob(os.path.join(work_dir, 'ion', '*')) if os.path.isdir(p)]
        pairs.sort()
        pairs = [tuple(p.split('_')) for p in pairs]

        pairs_same_starting_ranges_update = [p for p in pairs_same_starting_ranges if p not in pairs]
        pairs_diff_starting_ranges_update = [p for p in pairs_diff_starting_ranges if p not in pairs]
    else:
        pairs_same_starting_ranges_update = pairs_same_starting_ranges
        pairs_diff_starting_ranges_update = pairs_diff_starting_ranges

    return pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update

def getDatesIonosphere(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update):
    '''
    This routine gets all dates associated with ionosphere estimation from
    pairs_same_starting_ranges_update and pairs_diff_starting_ranges_update.
    '''

    dateListIon = []
    for pairs in (pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update):
        for p in pairs:
            if p[0] not in dateListIon:
                dateListIon.append(p[0])
            if p[1] not in dateListIon:
                dateListIon.append(p[1])

    dateListIon.sort()

    return dateListIon

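An equivalent, more idiomatic formulation uses a set and a single sort (assuming the dates are plain strings, as elsewhere in this file):

    dateListIon = sorted({date for pairs in (pairs_same_starting_ranges_update,
                                             pairs_diff_starting_ranges_update)
                          for pair in pairs for date in pair})
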
def checkCurrentStatusIonosphere(inps):

    #get_dates can be run multiple times anywhere. it only depends on inps parameters and safe files, nothing else
    acquisitionDates, stackReferenceDate, secondaryDates, safe_dict = get_dates(inps)

    pairs_same_starting_ranges, pairs_diff_starting_ranges = selectNeighborPairsIonosphere(safe_dict, inps.num_connections_ion)
    pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update = excludeExistingPairsIonosphere(pairs_same_starting_ranges, pairs_diff_starting_ranges, inps.work_dir)
    dateListIon = getDatesIonosphere(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update)

    #report pairs with different swath starting ranges.
    pdiff = 'ionosphere phase estimation pairs with different swath starting ranges\n'
    for p in pairs_diff_starting_ranges:
        pdiff += '{}_{}\n'.format(p[0], p[1])

    pdiff += '\nionosphere phase estimation pairs with different platforms\n'
    for p in pairs_same_starting_ranges + pairs_diff_starting_ranges:
        if safe_dict[p[0]].platform != safe_dict[p[1]].platform:
            pdiff += '{}_{}\n'.format(p[0], p[1])

    with open('pairs_diff_starting_ranges.txt', 'w') as f:
        f.write(pdiff)

    return dateListIon, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict


########################################
# Below are a few workflow examples.

@@ -561,6 +720,8 @@ def correlationStack(inps, acquisitionDates, stackReferenceDate, secondaryDates,
    runObj.filter_coherence(pairs)
    runObj.finalize()

    return i


def interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack):

@@ -600,6 +761,8 @@ def interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDate
    runObj.unwrap(pairs)
    runObj.finalize()

    return i


def offsetStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack):

@@ -621,6 +784,61 @@ def offsetStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe
    runObj.denseOffsets(pairs)
    runObj.finalize()

    return i

def ionosphereStack(inps, dateListIon, stackReferenceDate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, i):

    i += 1
    runObj = run()
    runObj.configure(inps, 'run_{:02d}_subband_and_resamp'.format(i))
    runObj.subband_and_resamp(dateListIon, stackReferenceDate)
    runObj.finalize()

    i += 1
    runObj = run()
    runObj.configure(inps, 'run_{:02d}_generateIgram_ion'.format(i))
    runObj.generateIgram_ion(pairs_same_starting_ranges_update + pairs_diff_starting_ranges_update, stackReferenceDate)
    runObj.finalize()

    i += 1
    runObj = run()
    runObj.configure(inps, 'run_{:02d}_mergeBurstsIon'.format(i))
    runObj.mergeBurstsIon(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update)
    runObj.finalize()

    i += 1
    runObj = run()
    runObj.configure(inps, 'run_{:02d}_unwrap_ion'.format(i))
    runObj.unwrap_ion(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update)
    runObj.finalize()

    i += 1
    runObj = run()
    runObj.configure(inps, 'run_{:02d}_look_ion'.format(i))
    runObj.look_ion(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update)
    runObj.finalize()

    i += 1
    runObj = run()
    runObj.configure(inps, 'run_{:02d}_computeIon'.format(i))
    runObj.computeIon(pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict)
    runObj.finalize()

    i += 1
    runObj = run()
    runObj.configure(inps, 'run_{:02d}_filtIon'.format(i))
    runObj.filtIon(pairs_same_starting_ranges_update + pairs_diff_starting_ranges_update)
    runObj.finalize()

    i += 1
    runObj = run()
    runObj.configure(inps, 'run_{:02d}_invertIon'.format(i))
    runObj.invertIon()
    runObj.finalize()

    return i

def checkCurrentStatus(inps):
    acquisitionDates, stackReferenceDate, secondaryDates, safe_dict = get_dates(inps)

@@ -767,19 +985,26 @@ def main(iargs=None):
    print ('*****************************************')
    if inps.workflow == 'interferogram':
        interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack)
        i = interferogramStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack)

    elif inps.workflow == 'offset':
        offsetStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack)
        i = offsetStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack)

    elif inps.workflow == 'correlation':
        correlationStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack)
        i = correlationStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, pairs, updateStack)

    elif inps.workflow == 'slc':
        slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack, mergeSLC=True)
        i = slcStack(inps, acquisitionDates, stackReferenceDate, secondaryDates, safe_dict, updateStack, mergeSLC=True)

    #do ionosphere estimation
    if inps.param_ion is not None:
        dateListIon, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict = checkCurrentStatusIonosphere(inps)
        i = ionosphereStack(inps, dateListIon, stackReferenceDate, pairs_same_starting_ranges_update, pairs_diff_starting_ranges_update, safe_dict, i)


if __name__ == "__main__":

@@ -0,0 +1,79 @@
#!/usr/bin/env python3

# Author: Cunren Liang
# Copyright 2021

import isce
import isceobj
import stdproc
from stdproc.stdproc import crossmul
import numpy as np
from isceobj.Util.Poly2D import Poly2D
import argparse
import os
import copy
import s1a_isce_utils as ut
from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct

#it should be OK that the function name is the same as the script name
from subband_and_resamp import subband


def createParser():
    parser = argparse.ArgumentParser(description='bandpass filtering burst-by-burst SLCs')

    parser.add_argument('-d', '--directory', dest='directory', type=str, required=True,
            help='Directory with the acquisition')

    return parser

def cmdLineParse(iargs = None):
    parser = createParser()
    return parser.parse_args(args=iargs)


def main(iargs=None):
    '''
    Create subband burst SLCs.
    '''
    inps = cmdLineParse(iargs)
    swathList = ut.getSwathList(inps.directory)

    for swath in swathList:
        acquisition = ut.loadProduct(os.path.join(inps.directory, 'IW{0}.xml'.format(swath)))
        for burst in acquisition.bursts:

            print("processing swath {}, burst {}".format(swath, os.path.basename(burst.image.filename)))

            outname = burst.image.filename
            outnameLower = os.path.splitext(outname)[0]+'_lower.slc'
            outnameUpper = os.path.splitext(outname)[0]+'_upper.slc'
            if os.path.exists(outnameLower) and os.path.exists(outnameLower+'.vrt') and os.path.exists(outnameLower+'.xml') and \
               os.path.exists(outnameUpper) and os.path.exists(outnameUpper+'.vrt') and os.path.exists(outnameUpper+'.xml'):
                print('burst {} already processed, skip...'.format(os.path.basename(burst.image.filename)))
                continue

            #subband filtering
            from Stack import ionParam
            from isceobj.Constants import SPEED_OF_LIGHT
            rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * burst.rangePixelSize)

            ionParamObj = ionParam()
            ionParamObj.configure()
            outputfile = [outnameLower, outnameUpper]
            bw = [ionParamObj.rgBandwidthSub / rangeSamplingRate, ionParamObj.rgBandwidthSub / rangeSamplingRate]
            bc = [-ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate, ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate]
            rgRef = ionParamObj.rgRef
            subband(burst, 2, outputfile, bw, bc, rgRef, True)


if __name__ == '__main__':
    '''
    Main driver.
    '''
    # Main Driver
    main()

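Each full-band burst SLC therefore yields two half-band SLCs alongside it; for a hypothetical burst_01.slc the output names are derived like this:

    import os

    outname = 'burst_01.slc'
    print(os.path.splitext(outname)[0] + '_lower.slc')   # burst_01_lower.slc
    print(os.path.splitext(outname)[0] + '_upper.slc')   # burst_01_upper.slc
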
@@ -0,0 +1,304 @@
#!/usr/bin/env python3

# Author: Cunren Liang
# Copyright 2021

import isce
import isceobj
import stdproc
from stdproc.stdproc import crossmul
import numpy as np
from isceobj.Util.Poly2D import Poly2D
import argparse
import os
import copy
import s1a_isce_utils as ut
from isceobj.Sensor.TOPS import createTOPSSwathSLCProduct


def createParser():
    parser = argparse.ArgumentParser(description='bandpass filtering and resampling burst-by-burst SLCs')

    parser.add_argument('-m', '--reference', dest='reference', type=str, required=True,
            help='Directory with the reference acquisition')

    parser.add_argument('-s', '--secondary', dest='secondary', type=str, required=True,
            help='Directory with the secondary acquisition')

    parser.add_argument('-o', '--coregdir', dest='coreg', type=str, default='coreg_secondary',
            help='Directory with coregistered SLCs and IFGs')

    parser.add_argument('-a', '--azimuth_misreg', dest='misreg_az', type=str, default=0.0,
            help='File name with the azimuth misregistration')

    parser.add_argument('-r', '--range_misreg', dest='misreg_rng', type=str, default=0.0,
            help='File name with the range misregistration')

    parser.add_argument('--noflat', dest='noflat', action='store_true', default=False,
            help='To turn off flattening. False: flattens the SLC. True: turns off flattening.')

    return parser

def cmdLineParse(iargs = None):
    parser = createParser()
    return parser.parse_args(args=iargs)

def resampSecondary(mas, slv, rdict, outname, flatten):
    '''
    Resample burst by burst.
    '''

    azpoly = rdict['azpoly']
    rgpoly = rdict['rgpoly']
    azcarrpoly = rdict['carrPoly']
    dpoly = rdict['doppPoly']

    rngImg = isceobj.createImage()
    rngImg.load(rdict['rangeOff'] + '.xml')
    rngImg.setAccessMode('READ')

    aziImg = isceobj.createImage()
    aziImg.load(rdict['azimuthOff'] + '.xml')
    aziImg.setAccessMode('READ')

    inimg = isceobj.createSlcImage()
    inimg.load(slv.image.filename + '.xml')
    inimg.setAccessMode('READ')


    rObj = stdproc.createResamp_slc()
    rObj.slantRangePixelSpacing = slv.rangePixelSize
    rObj.radarWavelength = slv.radarWavelength
    rObj.azimuthCarrierPoly = azcarrpoly
    rObj.dopplerPoly = dpoly

    rObj.azimuthOffsetsPoly = azpoly
    rObj.rangeOffsetsPoly = rgpoly
    rObj.imageIn = inimg

    width = mas.numberOfSamples
    length = mas.numberOfLines
    imgOut = isceobj.createSlcImage()
    imgOut.setWidth(width)
    imgOut.filename = outname
    imgOut.setAccessMode('write')

    rObj.outputWidth = width
    rObj.outputLines = length
    rObj.residualRangeImage = rngImg
    rObj.residualAzimuthImage = aziImg
    rObj.flatten = flatten
    print(rObj.flatten)
    rObj.resamp_slc(imageOut=imgOut)

    imgOut.renderHdr()
    imgOut.renderVRT()
    return imgOut


def subband(burst, nout, outputfile, bw, bc, rgRef, virtual):
    '''
    burst: input burst object
    nout: number of output files
    outputfile: [value_of_out_1, value_of_out_2, value_of_out_3...] output files
    bw: [value_of_out_1, value_of_out_2, value_of_out_3...] filter bandwidth divided by sampling frequency [0, 1]
    bc: [value_of_out_1, value_of_out_2, value_of_out_3...] filter center frequency divided by sampling frequency
    rgRef: reference range for moving the center frequency to zero
    virtual: True or False
    '''

    from isceobj.Constants import SPEED_OF_LIGHT
    from isceobj.TopsProc.runIon import removeHammingWindow
    from contrib.alos2proc.alos2proc import rg_filter

    tmpFilename = burst.image.filename + '.tmp'

    #remove window
    rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * burst.rangePixelSize)
    if burst.rangeWindowType == 'Hamming':
        removeHammingWindow(burst.image.filename, tmpFilename, burst.rangeProcessingBandwidth, rangeSamplingRate, burst.rangeWindowCoefficient, virtual=virtual)
    else:
        raise Exception('Range weight window type: {} is not supported yet!'.format(burst.rangeWindowType))

    #subband
    rg_filter(tmpFilename,
              #burst.numberOfSamples,
              nout,
              outputfile,
              bw,
              bc,
              129,
              512,
              0.1,
              0,
              (burst.startingRange - rgRef) / burst.rangePixelSize
              )

    os.remove(tmpFilename)
    os.remove(tmpFilename+'.vrt')
    os.remove(tmpFilename+'.xml')

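To make the normalized bw/bc arguments concrete, here is the arithmetic with Sentinel-1 IW-like numbers (the bandwidth values below are assumptions for illustration only; the real values come from the ionParam configuration):

    SPEED_OF_LIGHT = 299792458.0
    rangePixelSize = 2.33                                         # m, typical S1 IW range pixel spacing
    rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * rangePixelSize)   # ~64.3 MHz
    rgBandwidthSub = 20.0e6                                       # assumed sub-band bandwidth, Hz
    rgBandwidthForSplit = 40.0e6                                  # assumed split bandwidth, Hz

    bw = rgBandwidthSub / rangeSamplingRate                 # ~0.31 of the sampling frequency
    bc = rgBandwidthForSplit / 3.0 / rangeSamplingRate      # ~0.21; used as -bc/+bc for lower/upper band
    print(bw, bc)
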
def main(iargs=None):
    '''
    Create coregistered overlap secondaries.
    '''
    inps = cmdLineParse(iargs)
    referenceSwathList = ut.getSwathList(inps.reference)
    secondarySwathList = ut.getSwathList(inps.secondary)

    swathList = list(sorted(set(referenceSwathList+secondarySwathList)))

    #if os.path.abspath(inps.reference) == os.path.abspath(inps.secondary):
    #    print('secondary is the same as reference, only performing subband filtering')

    for swath in swathList:

        ####Load reference and secondary metadata
        reference = ut.loadProduct(os.path.join(inps.reference, 'IW{0}.xml'.format(swath)))
        secondary = ut.loadProduct(os.path.join(inps.secondary, 'IW{0}.xml'.format(swath)))


        if os.path.exists(str(inps.misreg_az)):
            with open(inps.misreg_az, 'r') as f:
                misreg_az = float(f.readline())
        else:
            misreg_az = 0.0

        if os.path.exists(str(inps.misreg_rng)):
            with open(inps.misreg_rng, 'r') as f:
                misreg_rg = float(f.readline())
        else:
            misreg_rg = 0.0

        ###Output directory for coregistered SLCs
        outdir = os.path.join(inps.coreg, 'IW{0}'.format(swath))
        offdir = os.path.join(inps.coreg, 'IW{0}'.format(swath))
        os.makedirs(outdir, exist_ok=True)


        ####Indices w.r.t reference
        burstoffset, minBurst, maxBurst = reference.getCommonBurstLimits(secondary)
        secondaryBurstStart = minBurst + burstoffset
        secondaryBurstEnd = maxBurst

        relShifts = ut.getRelativeShifts(reference, secondary, minBurst, maxBurst, secondaryBurstStart)

        print('Shifts: ', relShifts)

        ####Can incorporate known misregistration here

        apoly = Poly2D()
        apoly.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]])

        rpoly = Poly2D()
        rpoly.initPoly(rangeOrder=0, azimuthOrder=0, coeffs=[[0.]])


        #slvCoreg = createTOPSSwathSLCProduct()
        slvCoreg = ut.coregSwathSLCProduct()
        slvCoreg.configure()


        for ii in range(minBurst, maxBurst):

            outname = os.path.join(outdir, 'burst_%02d.slc'%(ii+1))
            outnameLower = os.path.splitext(outname)[0]+'_lower.slc'
            outnameUpper = os.path.splitext(outname)[0]+'_upper.slc'
            if os.path.exists(outnameLower) and os.path.exists(outnameLower+'.vrt') and os.path.exists(outnameLower+'.xml') and \
               os.path.exists(outnameUpper) and os.path.exists(outnameUpper+'.vrt') and os.path.exists(outnameUpper+'.xml'):
                print('burst %02d already processed, skip...'%(ii+1))
                continue

            jj = secondaryBurstStart + ii - minBurst

            masBurst = reference.bursts[ii]
            slvBurst = secondary.bursts[jj]

            #####Top burst processing
            try:
                offset = relShifts[jj]
            except:
                raise Exception('Trying to access shift for secondary burst index {0}, which may not overlap with reference'.format(jj))


            ####Setup initial polynomials
            ### If no misregs are given, these are zero
            ### If provided, they can be used for resampling without running geo2rdr again for fast results
            rdict = {'azpoly' : apoly,
                     'rgpoly' : rpoly,
                     'rangeOff' : os.path.join(offdir, 'range_%02d.off'%(ii+1)),
                     'azimuthOff': os.path.join(offdir, 'azimuth_%02d.off'%(ii+1))}


            ###For future - should account for azimuth and range misreg here .. ignoring for now.
            azCarrPoly, dpoly = secondary.estimateAzimuthCarrierPolynomials(slvBurst, offset = -1.0 * offset)

            rdict['carrPoly'] = azCarrPoly
            rdict['doppPoly'] = dpoly


            #subband filtering
            from Stack import ionParam
            from isceobj.Constants import SPEED_OF_LIGHT
            rangeSamplingRate = SPEED_OF_LIGHT / (2.0 * slvBurst.rangePixelSize)

            ionParamObj = ionParam()
            ionParamObj.configure()
            lower_tmpfile = os.path.splitext(slvBurst.image.filename)[0]+'_lower_tmp.slc'
            upper_tmpfile = os.path.splitext(slvBurst.image.filename)[0]+'_upper_tmp.slc'
            outputfile = [lower_tmpfile, upper_tmpfile]
            bw = [ionParamObj.rgBandwidthSub / rangeSamplingRate, ionParamObj.rgBandwidthSub / rangeSamplingRate]
            bc = [-ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate, ionParamObj.rgBandwidthForSplit / 3.0 / rangeSamplingRate]
            rgRef = ionParamObj.rgRef
            subband(slvBurst, 2, outputfile, bw, bc, rgRef, True)

            #resampling
            slvBurst.radarWavelength = ionParamObj.radarWavelengthLower
            slvBurst.image.filename = lower_tmpfile
            outnameSubband = outnameLower
            outimg = resampSecondary(masBurst, slvBurst, rdict, outnameSubband, (not inps.noflat))

            slvBurst.radarWavelength = ionParamObj.radarWavelengthUpper
            slvBurst.image.filename = upper_tmpfile
            outnameSubband = outnameUpper
            outimg = resampSecondary(masBurst, slvBurst, rdict, outnameSubband, (not inps.noflat))

            #remove original subband images
            os.remove(lower_tmpfile)
            os.remove(lower_tmpfile+'.vrt')
            os.remove(lower_tmpfile+'.xml')

            os.remove(upper_tmpfile)
            os.remove(upper_tmpfile+'.vrt')
            os.remove(upper_tmpfile+'.xml')


        # subband images share IW*.xml with the full-band images, so the lines below are no longer required.
        #########################################################################################################################################
        # minAz, maxAz, minRg, maxRg = ut.getValidLines(slvBurst, rdict, outname,
        #         misreg_az = misreg_az - offset, misreg_rng = misreg_rg)

        # copyBurst = copy.deepcopy(masBurst)
        # ut.adjustValidSampleLine_V2(copyBurst, slvBurst, minAz=minAz, maxAz=maxAz,
        #         minRng=minRg, maxRng=maxRg)
        # copyBurst.image.filename = outimg.filename
        # print('After: ', copyBurst.firstValidLine, copyBurst.numValidLines)
        # slvCoreg.bursts.append(copyBurst)

        # slvCoreg.numberOfBursts = len(slvCoreg.bursts)
        # slvCoreg.source = ut.asBaseClass(secondary)
        # slvCoreg.reference = reference
        # ut.saveProduct(slvCoreg, outdir + '.xml')

if __name__ == '__main__':
    '''
    Main driver.
    '''
    # Main Driver
    main()

@@ -201,7 +201,12 @@ def runUnwrap(infile, outfile, corfile, config, costMode = None,initMethod = Non
    snp.setDefoMaxCycles(defomax)
    snp.setRangeLooks(rangeLooks)
    snp.setAzimuthLooks(azimuthLooks)
    snp.setCorFileFormat('FLOAT_DATA')

    corImg = isceobj.createImage()
    corImg.load(corfile + '.xml')
    if corImg.bands == 1:
        snp.setCorFileFormat('FLOAT_DATA')

    snp.prepare()
    snp.unwrap()

@@ -334,6 +339,17 @@ def main(iargs=None):
        metadata = extractInfo(xmlFile, inps)
        fncall(inps.intfile, inps.unwfile, inps.cohfile, metadata, defomax=inps.defomax)

        #mask out weird values from snaphu
        intImage = isceobj.createImage()
        intImage.load(inps.intfile+'.xml')
        width = intImage.width
        length = intImage.length

        flag = np.fromfile(inps.intfile, dtype=np.complex64).reshape(length, width)
        unw = np.memmap(inps.unwfile, dtype='float32', mode='r+', shape=(length*2, width))
        (unw[0:length*2:2, :])[np.nonzero(flag==0)] = 0
        (unw[1:length*2:2, :])[np.nonzero(flag==0)] = 0

    elif inps.method == 'icu':
        runUnwrapIcu(inps.intfile, inps.unwfile)

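The strided indexing works because the .unw file is a two-band, line-interleaved (BIL) float32 raster: even memmap rows hold band 1 and odd rows hold band 2 of each image line (in ISCE's .unw convention, amplitude and unwrapped phase). A toy illustration of that row interleaving:

    import numpy as np

    length, width = 3, 4
    unw = np.arange(length * 2 * width, dtype='float32').reshape(length * 2, width)
    band1 = unw[0:length*2:2, :]      # rows 0, 2, 4 -> first band
    band2 = unw[1:length*2:2, :]      # rows 1, 3, 5 -> second band
    print(band1.shape, band2.shape)   # (3, 4) (3, 4)
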
@@ -1,115 +1,66 @@
FROM hysds/dev:latest
FROM ubuntu:20.04 as builder

# Set an encoding to make things work smoothly.
ENV LANG en_US.UTF-8
ENV TZ US/Pacific
ARG DEBIAN_FRONTEND=noninteractive

# Set ISCE repo
ENV ISCE_ORG isce-framework

# set to root user
USER root

# install tools for RPM generation
RUN set -ex \
 && yum update -y \
 && yum groupinstall -y "development tools" \
 && yum install -y \
      make ruby-devel rpm-build rubygems \
 && gem install ffi -v 1.12.2 \
 && gem install --no-ri --no-rdoc fpm -v 1.11.0

# install isce requirements
RUN set -ex \
 && . /opt/conda/bin/activate root \
 && conda install --yes \
      cython \
      gdal \
      git \
      h5py \
      libgdal \
      pytest \
      numpy \
      fftw \
      scipy \
      scons \
      hdf4 \
      hdf5 \
      libgcc \
      libstdcxx-ng \
      cmake \
 && yum install -y uuid-devel x11-devel motif-devel jq \
      opencv opencv-devel opencv-python \
 && ln -sf /opt/conda/bin/cython /opt/conda/bin/cython3 \
 && mkdir -p /opt/isce2/src

# override system libuuid into conda env to link in libXm and libXt
RUN set -ex \
 && cd /opt/conda/lib \
 && unlink libuuid.so \
 && unlink libuuid.so.1 \
 && ln -s /lib64/libuuid.so.1.3.0 libuuid.so \
 && ln -s /lib64/libuuid.so.1.3.0 libuuid.so.1

# install libgfortran.so.3 and create missing link
RUN set -ex \
 && yum install -y gcc-gfortran \
 && cd /lib64 \
 && ( test -f libgfortran.so || ln -sv libgfortran.so.*.* libgfortran.so )
 && apt-get update \
 && apt-get install -y \
      cmake \
      cython3 \
      git \
      libfftw3-dev \
      libgdal-dev \
      libhdf4-alt-dev \
      libhdf5-dev \
      libopencv-dev \
      ninja-build \
      python3-gdal \
      python3-h5py \
      python3-numpy \
      python3-scipy \
 && echo done

# copy repo
COPY . /opt/isce2/src/isce2

# build ISCE
RUN set -ex \
 && . /opt/conda/bin/activate root \
 && cd /opt/isce2/src/isce2 \
 && source docker/build_env.sh \
 && mkdir -p $BUILD_DIR \
 && cp docker/SConfigISCE configuration/SConfigISCE \
 && scons install \
 && cp docker/isce_env.sh $ISCE_INSTALL_ROOT \
 && cd /tmp \
 && mkdir -p /tmp/rpm-build/opt \
 && mv $ISCE_INSTALL_ROOT /tmp/rpm-build/opt \
 && curl -s https://api.github.com/repos/$ISCE_ORG/isce2/git/refs/heads/main \
      > /tmp/rpm-build/opt/isce2/version.json \
 && hash=$(cat /tmp/rpm-build/opt/isce2/version.json | jq -r .object.sha) \
 && short_hash=$(echo $hash | cut -c1-5) \
 && fpm -s dir -t rpm -C /tmp/rpm-build --name isce \
      --prefix=/ --version=2.3 --provides=isce \
      --maintainer=piyush.agram@jpl.nasa.gov \
      --description="InSAR Scientific Computing Environment v2 (${hash})"
 && mkdir build && cd build \
 && cmake .. \
      -DPYTHON_MODULE_DIR="$(python3 -c 'import site; print(site.getsitepackages()[-1])')" \
      -DCMAKE_INSTALL_PREFIX=install \
 && make -j8 install \
 && cpack -G DEB \
 && cp isce*.deb /tmp/

FROM hysds/pge-base:latest
FROM ubuntu:20.04

# Set an encoding to make things work smoothly.
ENV LANG en_US.UTF-8
ENV TZ US/Pacific
ARG DEBIAN_FRONTEND=noninteractive

# install ISCE from RPM
COPY --from=0 /tmp/isce-2.3-1.x86_64.rpm /tmp/isce-2.3-1.x86_64.rpm

# install isce and its minimal requirements
RUN set -ex \
 && sudo /opt/conda/bin/conda install --yes \
      gdal \
      h5py \
      libgdal \
      pytest \
      numpy \
      fftw \
      scipy \
      hdf4 \
      hdf5 \
 && sudo yum update -y \
 && sudo yum install -y uuid-devel x11-devel motif-devel gcc-gfortran \
 && cd /opt/conda/lib \
 && sudo unlink libuuid.so \
 && sudo unlink libuuid.so.1 \
 && sudo ln -s /lib64/libuuid.so.1.3.0 libuuid.so \
 && sudo ln -s /lib64/libuuid.so.1.3.0 libuuid.so.1 \
 && cd /lib64 \
 && ( test -f libgfortran.so || sudo ln -sv libgfortran.so.*.* libgfortran.so ) \
 && sudo yum install -y /tmp/isce-2.3-1.x86_64.rpm \
 && sudo yum clean all \
 && sudo rm -rf /var/cache/yum \
 && sudo rm /tmp/isce-2.3-1.x86_64.rpm
 && apt-get update \
 && apt-get install -y \
      libfftw3-3 \
      libgdal26 \
      libhdf4-0 \
      libhdf5-103 \
      libopencv-core4.2 \
      libopencv-highgui4.2 \
      libopencv-imgproc4.2 \
      python3-gdal \
      python3-h5py \
      python3-numpy \
      python3-scipy \
 && echo done

# install ISCE from DEB
COPY --from=builder /tmp/isce*.deb /tmp/isce2.deb

RUN dpkg -i /tmp/isce2.deb