Merge branch 'master' into BLM

LT1AB
Heresh Fattahi 2020-07-02 15:03:10 -07:00 committed by GitHub
commit eb3eff61a6
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
21 changed files with 384 additions and 179 deletions

View File

@ -5,6 +5,9 @@ set(components
find_package(X11 COMPONENTS ${components})
# The X11::{component} targets only exist for CMake >= 3.14,
# so we create them here for backwards compatibility.
if(X11_FOUND)
# make X11 look like a regular find_package component
@ -13,11 +16,9 @@ if(X11_FOUND)
list(APPEND components X11)
foreach(component ${components})
message("${component} include = ${X11_${component}_INCLUDE_PATH}")
if(X11_${component}_FOUND)
if(NOT TARGET X11::${component})
add_library(X11::${component} IMPORTED INTERFACE)
endif()
if(X11_${component}_FOUND AND
NOT TARGET X11::${component})
add_library(X11::${component} IMPORTED INTERFACE)
target_link_libraries(X11::${component}
INTERFACE ${X11_${component}_LIB})
target_include_directories(X11::${component} SYSTEM

View File

@ -14,13 +14,15 @@ set(CMAKE_CXX_STANDARD 11)
set(CMAKE_CXX_STANDARD_REQUIRED y)
set(CMAKE_CXX_EXTENSIONS n)
# TODO (fix RPATHs)
# We have to hack our RPATHs a bit for these shared libraries to be
# loaded by others on the install-side. Maybe these libraries should
# be combined and/or installed to a common ISCE2 lib directory.
# Is there a semantic way to propagate their RPATHs
# without using these global variables?
include(GNUInstallDirs)
# add automatically determined parts of the RPATH, which point to directories
# outside of the build tree, to the install RPATH
set(CMAKE_INSTALL_RPATH_USE_LINK_PATH ON)
list(APPEND CMAKE_INSTALL_RPATH
${CMAKE_INSTALL_PREFIX}/${ISCE2_PKG}/components/isceobj/Util
)
# the RPATH to be used when installing, but only if it's not a system directory
set(abs_libdir ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR})
list(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES ${abs_libdir} isSystemDir)
if("${isSystemDir}" STREQUAL "-1")
list(APPEND CMAKE_INSTALL_RPATH ${abs_libdir})
endif()

View File

@ -50,22 +50,16 @@ endfunction()
# Note that it first checks if a provided file is a target,
# and if so, installs it as a TARGET instead. Make sure your
# filenames and target names don't have any overlap!
# Install the given files/targets into the ISCE2 package directory,
# mirroring the current binary-dir layout. Each argument that names an
# existing target is installed as a TARGET; anything else as a FILE.
function(InstallSameDir)
    mark_as_advanced(isce2_bin_base)  # NOTE(review): cache variable defined elsewhere — confirm name
    # Path of the current dir relative to the isce2 build root.
    # This is loop-invariant, so compute it once, not per argument.
    # (A stale duplicate call using ${isce2_bin_dir} was removed; its
    # result was immediately overwritten and never used.)
    file(RELATIVE_PATH path ${isce2_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR})
    foreach(name ${ARGN})
        if(TARGET ${name})
            set(installtype TARGETS)
        else()
            set(installtype FILES)
        endif()
        install(${installtype} ${name}
            DESTINATION ${ISCE2_PKG}/${path}
        )
    endforeach()
endfunction()
# We use this instead of CMAKE_BINARY_DIR to handle
# cases where isce2 is added as a subdirectory
set(isce2_bin_dir ${CMAKE_CURRENT_BINARY_DIR} CACHE PATH
"ISCE2 root build directory")

View File

@ -2,15 +2,19 @@ cmake_minimum_required(VERSION 3.12 FATAL_ERROR)
project(isce2 LANGUAGES C CXX Fortran)
list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}/.cmake)
include(CheckLanguage)
check_language(CUDA)
if(CMAKE_CUDA_COMPILER)
set(CMAKE_CUDA_STANDARD 11)
set(CMAKE_CUDA_STANDARD_REQUIRED TRUE)
enable_language(CUDA)
find_package(CUDAToolkit) # TODO added in cmake 3.17 - copy this module
endif()
list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}/.cmake)
find_package(Python 3.5 REQUIRED COMPONENTS Interpreter Development)
find_package(Python 3.5 REQUIRED COMPONENTS Interpreter Development
OPTIONAL_COMPONENTS NumPy)
find_package(FFTW REQUIRED)
find_package(Motif)
find_package(OpenMP REQUIRED COMPONENTS C CXX Fortran)

View File

@ -33,8 +33,10 @@ TERRASARX, and UAVSAR.
- [Note On 'python3' Executable Convention](#python3-convention)
- [License required for dependencies to enable some workflows in ISCE](#license-required-for-dependencies-to-enable-some-workflows-in-isce)
2. [Building ISCE](#building-isce)
- [Configuration control: SCONS\_CONFIG\_DIR and SConfigISCE](#configuration-control)
- [Install ISCE](#install-isce)
- [SCons](#scons-recommended)
- [Configuration control: SCONS\_CONFIG\_DIR and SConfigISCE](#configuration-control)
- [Install ISCE](#install-isce)
- [CMake](#cmake-experimental)
- [Setup Your Environment](#setup-your-environment)
3. [Running ISCE](#running-isce)
- [Running ISCE from the command line](#running-isce-from-the-command-line)
@ -192,7 +194,9 @@ the older data with the same workflows available in this open source release.
## Building ISCE
### Configuration control
### SCons (recommended)
#### Configuration control
Scons requires that configuration information be present in a directory
specified by the environment variable SCONS\_CONFIG\_DIR. First, create a
@ -252,7 +256,7 @@ and the install files. Also, in the following the capitalization of 'isce' as
lower case does matter. This is the case-sensitive package name that Python
code uses for importing isce.
### Install ISCE
#### Install ISCE
cd isce
scons install
@ -273,7 +277,7 @@ This will build the necessary components and install them into the location
specified in the configuration file as PRJ\_SCONS\_INSTALL.
#### Note about compiling ISCE after an unsuccessful build.
##### Note about compiling ISCE after an unsuccessful build.
When building ISCE, scons will check the list of header files and libraries that
ISCE requires. Scons will cache the results of this dependency checking. So,
@ -290,6 +294,47 @@ directory containing the SConstruct file):
and then try "scons install" again.
### CMake (experimental)
Make sure you have the following prerequisites:
* CMake ≥ 3.12
* GCC ≥ 4.8 (with C++11 support)
* Python ≥ 3.5
* Cython
* FFTW 3
* GDAL
```sh
git clone https://github.com/isce-framework/isce2
cd isce2
mkdir build
cd build
cmake .. -DCMAKE_INSTALL_PREFIX=/my/isce/install/location
make install
```
#### Additional cmake configuration options
CMake uses `CMAKE_PREFIX_PATH` as a global prefix for finding packages,
which can come in handy when using e.g. Anaconda:
```sh
cmake [...] -DCMAKE_PREFIX_PATH=$CONDA_PREFIX
```
On macOS, cmake will also look for systemwide "frameworks",
which is usually not what you want when using Conda or Macports.
```sh
cmake [...] -DCMAKE_FIND_FRAMEWORK=NEVER
```
For packagers, the `PYTHON_MODULE_DIR` can be used to specify ISCE2's
package installation location relative to the installation prefix
```sh
cmake [...] -DPYTHON_MODULE_DIR=lib/python3.8m/site-packages
```
### Setup Your Environment
Once everything is installed, you will need to set the following environment

View File

@ -2,7 +2,14 @@ add_subdirectory(db)
add_subdirectory(TOPS)
add_subdirectory(MultiMode)
add_library(asa_im_decode src/asa_im_decode/asa_im_decode.c)
set_target_properties(asa_im_decode PROPERTIES
PREFIX ""
OUTPUT_NAME envisat
SUFFIX .so)
set(installfiles
asa_im_decode
alos
__init__.py
ALOS.py

View File

@ -81,15 +81,9 @@ add_library(utilLib SHARED
target_include_directories(utilLib PUBLIC
include
)
target_link_libraries(utilLib PUBLIC
target_link_libraries(utilLib PRIVATE
FFTW::Float
)
# TODO (fortran module include)
# This seems to be needed to use this library's modules,
# but is there a more idiomatic way to do this?
target_include_directories(utilLib INTERFACE
${CMAKE_CURRENT_BINARY_DIR}
)
add_library(combinedLib SHARED
Library/geometry/src/geometryModule.F
@ -120,17 +114,24 @@ target_include_directories(combinedlibmodule PUBLIC
target_link_libraries(combinedlibmodule PUBLIC
combinedLib
)
# TODO (fortran module include)
# This seems to be needed to use this library's modules,
# but is there a more idiomatic way to do this?
# Set up fortran module paths
set(mdir ${CMAKE_CURRENT_BINARY_DIR}/utillib_fortran_modules)
set_property(TARGET utilLib PROPERTY Fortran_MODULE_DIRECTORY ${mdir})
target_include_directories(utilLib INTERFACE
$<$<COMPILE_LANGUAGE:Fortran>:${mdir}>
)
set(mdir ${CMAKE_CURRENT_BINARY_DIR}/combinelib_fortran_modules)
set_property(TARGET combinedLib PROPERTY Fortran_MODULE_DIRECTORY ${mdir})
target_include_directories(combinedLib INTERFACE
${CMAKE_CURRENT_BINARY_DIR}
$<$<COMPILE_LANGUAGE:Fortran>:${mdir}>
)
install(TARGETS
utilLib
combinedLib
LIBRARY DESTINATION lib)
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR}
)
InstallSameDir(
combinedlibmodule

View File

@ -8,15 +8,14 @@ add_library(formslcLib SHARED
formslcLib/src/io.c
)
set_property(TARGET formslcLib PROPERTY POSITION_INDEPENDENT_CODE ON)
target_include_directories(formslcLib PRIVATE formslcLib/include)
target_link_libraries(formslcLib PUBLIC
utilLib
)
# TODO (fortran module include)
# This seems to be needed to use this library's modules,
# but is there a more idiomatic way to do this?
set(mdir ${CMAKE_CURRENT_BINARY_DIR}/formslc_fortran_modules)
set_property(TARGET formslcLib PROPERTY Fortran_MODULE_DIRECTORY ${mdir})
target_include_directories(formslcLib INTERFACE
${CMAKE_CURRENT_BINARY_DIR}
$<$<COMPILE_LANGUAGE:Fortran>:${mdir}>
)
add_subdirectory(correct)

View File

@ -3,7 +3,13 @@ add_subdirectory(geozero)
add_subdirectory(topozero)
if(CMAKE_CUDA_COMPILER)
# GPUampcor links against cublas_device, which was removed in CUDA 10,
# so only build it with older NVIDIA toolkits.
if(CMAKE_CUDA_COMPILER_ID STREQUAL "NVIDIA" AND
CMAKE_CUDA_COMPILER_VERSION VERSION_LESS 10)
add_subdirectory(GPUampcor)
endif()
add_subdirectory(GPUgeo2rdr)
endif()

View File

@ -1,3 +1,9 @@
if(NOT TARGET CUDA::cublas)
return()
endif()
return() # TODO get cublas_device working or remove usage of it
cython_add_module(GPUampcor
GPUampcor.pyx
cuda/GPUamp.cu
@ -9,6 +15,11 @@ target_include_directories(GPUampcor PUBLIC
include
)
target_link_libraries(GPUampcor PRIVATE
cublas
CUDA::cublas
DataAccessor_static
FFTW::Float
)
InstallSameDir(
GPUampcor
__init__.py
)

View File

@ -5,7 +5,7 @@ add_subdirectory(frameUtils)
#add_subdirectory(unwUtils)
add_subdirectory(downsample_unwrapper)
# PyCuAmpcor's own CMakeLists early-returns when the CUDA/GDAL/NumPy
# targets are unavailable, so it is safe to always add it here.
add_subdirectory(PyCuAmpcor)
add_subdirectory(splitSpectrum)
add_subdirectory(alos2filter)
add_subdirectory(alos2proc)

View File

@ -0,0 +1,46 @@
# Skip building PyCuAmpcor when its prerequisites (GDAL, NumPy,
# cuBLAS, cuFFT) were not found as imported targets.
if(NOT TARGET GDAL::GDAL
   OR NOT TARGET Python::NumPy
   OR NOT TARGET CUDA::cublas
   OR NOT TARGET CUDA::cufft
)
    return()
endif()

set(CMAKE_CUDA_STANDARD 11)
set(CMAKE_CUDA_STANDARD_REQUIRED TRUE)

# Cython extension module wrapping the CUDA ampcor implementation.
# NOTE: src/SConscript (a SCons build script, not a compilable source)
# was removed from this source list.
cython_add_module(PyCuAmpcor
    src/PyCuAmpcor.pyx
    src/GDALImage.cu
    src/SlcImage.cu
    src/cuAmpcorChunk.cu
    src/cuAmpcorController.cu
    src/cuAmpcorParameter.cu
    src/cuArrays.cu
    src/cuArraysCopy.cu
    src/cuArraysPadding.cu
    src/cuCorrFrequency.cu
    src/cuCorrNormalization.cu
    src/cuCorrTimeDomain.cu
    src/cuDeramp.cu
    src/cuEstimateStats.cu
    src/cuOffset.cu
    src/cuOverSampler.cu
    src/cuSincOverSampler.cu
)
target_include_directories(PyCuAmpcor PRIVATE
    src
)
target_link_libraries(PyCuAmpcor PRIVATE
    CUDA::cufft
    CUDA::cublas
    GDAL::GDAL
    Python::NumPy
)
InstallSameDir(
    __init__.py
    PyCuAmpcor
)

View File

@ -1,109 +1,135 @@
#!/usr/bin/env python3
# Author: Minyan Zhong, Lijun Zhu
# Author: Minyan Zhong, Lijun Zhu
import argparse
import os
import argparse
import numpy as np
import isce
import isceobj
from isceobj.Util.decorators import use_api
import numpy as np
from contrib.PyCuAmpcor.PyCuAmpcor import PyCuAmpcor
EXAMPLE = '''example
cuDenseOffsets.py -m ./merged/SLC/20151120/20151120.slc.full -s ./merged/SLC/20151214/20151214.slc.full
--referencexml ./reference/IW1.xml --outprefix ./merged/offsets/20151120_20151214/offset
--ww 256 --wh 256 --oo 32 --kw 300 --kh 100 --nwac 100 --nwdc 1 --sw 8 --sh 8 --gpuid 2
'''
def createParser():
    """Build the command line parser for dense offset estimation.

    Returns:
        argparse.ArgumentParser: configured parser.

    NOTE(review): the original span contained interleaved pre-/post-diff
    lines, registering several options twice (--outprefix, --oo, -g,
    --aa, --rr, -r, --deramp, --gpuid), which raises
    argparse.ArgumentError at import time. This version keeps only one
    registration of each option (the post-change variants).
    """
    parser = argparse.ArgumentParser(description='Generate offset field between two Sentinel slc',
                                     formatter_class=argparse.RawTextHelpFormatter,
                                     epilog=EXAMPLE)

    # input/output files
    parser.add_argument('-m', '--reference', type=str, dest='reference', required=True,
                        help='Reference image')
    parser.add_argument('-s', '--secondary', type=str, dest='secondary', required=True,
                        help='Secondary image')
    parser.add_argument('-l', '--lat', type=str, dest='lat', required=False,
                        help='Latitude')
    parser.add_argument('-L', '--lon', type=str, dest='lon', required=False,
                        help='Longitude')
    parser.add_argument('--los', type=str, dest='los', required=False,
                        help='Line of Sight')
    parser.add_argument('-x', '--referencexml', type=str, dest='referencexml', required=False,
                        help='Reference Image XML File')
    parser.add_argument('--op', '--outprefix', '--output-prefix', type=str, dest='outprefix',
                        default='offset', required=True,
                        help='Output prefix, default: offset.')
    parser.add_argument('--os', '--outsuffix', type=str, dest='outsuffix', default='',
                        help='Output suffix, default:.')

    # window geometry
    parser.add_argument('--ww', type=int, dest='winwidth', default=64,
                        help='Window width (default: %(default)s).')
    parser.add_argument('--wh', type=int, dest='winhgt', default=64,
                        help='Window height (default: %(default)s).')
    parser.add_argument('--sw', type=int, dest='srcwidth', default=20, choices=range(8, 33),
                        help='Search window width (default: %(default)s).')
    parser.add_argument('--sh', type=int, dest='srchgt', default=20, choices=range(8, 33),
                        help='Search window height (default: %(default)s).')
    parser.add_argument('--mm', type=int, dest='margin', default=50,
                        help='Margin (default: %(default)s).')
    parser.add_argument('--kw', type=int, dest='skipwidth', default=64,
                        help='Skip across (default: %(default)s).')
    parser.add_argument('--kh', type=int, dest='skiphgt', default=64,
                        help='Skip down (default: %(default)s).')
    parser.add_argument('--raw-osf', '--raw-over-samp-factor', type=int, dest='raw_oversample',
                        default=2, choices=range(2, 5),
                        help='raw data oversampling factor (default: %(default)s).')

    gross = parser.add_argument_group('Initial gross offset')
    gross.add_argument('-g', '--gross', type=int, dest='gross', default=0,
                       help='Use gross offset or not')
    gross.add_argument('--aa', type=int, dest='azshift', default=0,
                       help='Gross azimuth offset (default: %(default)s).')
    gross.add_argument('--rr', type=int, dest='rgshift', default=0,
                       help='Gross range offset (default: %(default)s).')

    corr = parser.add_argument_group('Correlation surface')
    corr.add_argument('--corr-win-size', type=int, dest='corr_win_size', default=-1,
                      help='Zoom-in window size of the correlation surface for oversampling (default: %(default)s).')
    corr.add_argument('--corr-osf', '--oo', '--corr-over-samp-factor', type=int, dest='corr_oversample',
                      default=32,
                      help='Oversampling factor of the zoom-in correlation surface (default: %(default)s).')

    # chunking / tiling of the offset field
    parser.add_argument('--nwa', type=int, dest='numWinAcross', default=-1,
                        help='Number of window across (default: %(default)s).')
    parser.add_argument('--nwd', type=int, dest='numWinDown', default=-1,
                        help='Number of window down (default: %(default)s).')
    parser.add_argument('--nwac', type=int, dest='numWinAcrossInChunk', default=1,
                        help='Number of window across in chunk (default: %(default)s).')
    parser.add_argument('--nwdc', type=int, dest='numWinDownInChunk', default=1,
                        help='Number of window down in chunk (default: %(default)s).')

    parser.add_argument('-r', '--redo', dest='redo', action='store_true',
                        help='To redo by force (ignore the existing offset fields).')
    parser.add_argument('--drmp', '--deramp', dest='deramp', type=int, default=0,
                        help='deramp method (0: mag, 1: complex) (default: %(default)s).')
    parser.add_argument('--gpuid', '--gid', '--gpu-id', dest='gpuid', type=int, default=-1,
                        help='GPU ID (default: %(default)s).')

    return parser
def cmdLineParse(iargs = None):
    """Parse command line arguments and validate the window configuration.

    Args:
        iargs: optional list of argument strings (defaults to sys.argv).

    Returns:
        argparse.Namespace with the parsed options.

    Raises:
        ValueError: if the oversampled search window in the across/range
            direction exceeds the 1024-pixel implementation limit.
    """
    parser = createParser()
    inps = parser.parse_args(args=iargs)

    # Oversampled window = (window width + both half search ranges) * raw
    # oversampling factor; the implementation caps this at 1024 pixels.
    # (Also fixed typo "implementaion" in the error message.)
    if (inps.winwidth + 2 * inps.srcwidth) * inps.raw_oversample > 1024:
        msg = 'input oversampled window size in the across/range direction '
        msg += 'exceeds the current implementation limit of 1024!'
        raise ValueError(msg)

    return inps
@use_api
def estimateOffsetField(reference, secondary, inps=None):
import pathlib
###Loading the secondary image object
sim = isceobj.createSlcImage()
sim.load(pathlib.Path(secondary).with_suffix('.xml'))
sim.load(secondary+'.xml')
sim.setAccessMode('READ')
sim.createImage()
###Loading the reference image object
sar = isceobj.createSlcImage()
sar.load(pathlib.Path(reference).with_suffix('.xml'))
sar.load(reference+'.xml')
sar.setAccessMode('READ')
sar.createImage()
@ -111,16 +137,18 @@ def estimateOffsetField(reference, secondary, inps=None):
length = sar.getLength()
objOffset = PyCuAmpcor()
objOffset.algorithm = 0
objOffset.deviceID = inps.gpuid # -1:let system find the best GPU
objOffset.nStreams = 1 #cudaStreams
objOffset.nStreams = 2 #cudaStreams
objOffset.derampMethod = inps.deramp
print('deramp method (0 for magnitude, 1 for complex): ', objOffset.derampMethod)
objOffset.referenceImageName = reference
objOffset.referenceImageName = reference+'.vrt'
objOffset.referenceImageHeight = length
objOffset.referenceImageWidth = width
objOffset.secondaryImageName = secondary
objOffset.secondaryImageName = secondary+'.vrt'
objOffset.secondaryImageHeight = length
objOffset.secondaryImageWidth = width
@ -132,39 +160,52 @@ def estimateOffsetField(reference, secondary, inps=None):
if (inps.numWinDown != -1):
objOffset.numberWindowDown = inps.numWinDown
if (inps.numWinAcross != -1):
objOffset.numberWindowAcross = inps.numWinAcross
print("offset field length: ",objOffset.numberWindowDown)
print("offset field width: ",objOffset.numberWindowAcross)
# window size
objOffset.windowSizeHeight = inps.winhgt
objOffset.windowSizeWidth = inps.winwidth
print('cross correlation window size: {} by {}'.format(objOffset.windowSizeHeight, objOffset.windowSizeWidth))
# search range
objOffset.halfSearchRangeDown = inps.srchgt
objOffset.halfSearchRangeAcross = inps.srcwidth
print('half search range: {} by {}'.format(inps.srchgt, inps.srcwidth))
# starting pixel
objOffset.referenceStartPixelDownStatic = inps.margin
objOffset.referenceStartPixelAcrossStatic = inps.margin
# skip size
objOffset.skipSampleDown = inps.skiphgt
objOffset.skipSampleAcross = inps.skipwidth
print('search step: {} by {}'.format(inps.skiphgt, inps.skipwidth))
# oversample raw data (SLC)
objOffset.rawDataOversamplingFactor = inps.raw_oversample
print('raw data oversampling factor:', inps.raw_oversample)
# correlation surface
if inps.corr_win_size == -1:
corr_win_size_orig = min(inps.srchgt, inps.srcwidth) * inps.raw_oversample + 1
inps.corr_win_size = np.power(2, int(np.log2(corr_win_size_orig)))
objOffset.corrSurfaceZoomInWindow = inps.corr_win_size
print('correlation surface zoom-in window size:', inps.corr_win_size)
# oversampling
objOffset.corrSufaceOverSamplingMethod = 0
objOffset.corrSurfaceOverSamplingFactor = inps.oversample
objOffset.corrSurfaceOverSamplingFactor = inps.corr_oversample
print('correlation surface oversampling factor:', inps.corr_oversample)
# output filenames
objOffset.offsetImageName = str(inps.outprefix) + str(inps.outsuffix) + '.bip'
objOffset.grossOffsetImageName = str(inps.outprefix) + str(inps.outsuffix) + '_gross.bip'
objOffset.snrImageName = str(inps.outprefix) + str(inps.outsuffix) + '_snr.bip'
objOffset.covImageName = str(inps.outprefix) + str(inps.outsuffix) + '_cov.bip'
print("offsetfield: ",objOffset.offsetImageName)
print("gross offsetfield: ",objOffset.grossOffsetImageName)
print("snr: ",objOffset.snrImageName)
@ -175,44 +216,45 @@ def estimateOffsetField(reference, secondary, inps=None):
snrImageName = objOffset.snrImageName.decode('utf8')
covImageName = objOffset.covImageName.decode('utf8')
if os.path.exists(offsetImageName) and inps.redo==0:
print(offsetImageName)
print(inps.redo)
if os.path.exists(offsetImageName) and not inps.redo:
print('offsetfield file exists')
exit()
return 0
# generic control
objOffset.numberWindowDownInChunk = inps.numWinDownInChunk
objOffset.numberWindowAcrossInChunk = inps.numWinAcrossInChunk
objOffset.useMmap = 0
objOffset.mmapSize = 8
objOffset.setupParams()
## Set Gross Offset ###
if inps.gross == 0:
print("Set constant grossOffset")
print("By default, the gross offsets are zero")
print("You can override the default values here")
objOffset.setConstantGrossOffset(0, 0)
else:
print("Set varying grossOffset")
print("By default, the gross offsets are zero")
print("You can override the default grossDown and grossAcross arrays here")
objOffset.setVaryingGrossOffset(np.zeros(shape=grossDown.shape,dtype=np.int32), np.zeros(shape=grossAcross.shape,dtype=np.int32))
# check
objOffset.setVaryingGrossOffset(np.zeros(shape=grossDown.shape,dtype=np.int32),
np.zeros(shape=grossAcross.shape,dtype=np.int32))
# check
objOffset.checkPixelInImageRange()
# Run the code
print('Running PyCuAmpcor')
objOffset.runAmpcor()
objOffset.runAmpcor()
print('Finished')
sar.finalizeImage()
sim.finalizeImage()
# Finalize the results
# offsetfield
outImg = isceobj.createImage()
@ -257,18 +299,22 @@ def estimateOffsetField(reference, secondary, inps=None):
covImg.setAccessMode('read')
covImg.renderHdr()
return 0
def main(iargs=None):
    """Entry point: parse arguments and estimate the dense offset field.

    NOTE(review): the original span contained a duplicated pre-diff
    `def main` line (merge residue); only one definition is kept.
    """
    inps = cmdLineParse(iargs)

    outDir = os.path.dirname(inps.outprefix)
    print(inps.outprefix)
    # outprefix may be a bare filename, in which case dirname() is ''
    # and there is no directory to create (makedirs('') would raise).
    if outDir:
        os.makedirs(outDir, exist_ok=True)

    estimateOffsetField(inps.reference, inps.secondary, inps)
    return


if __name__ == '__main__':
    main()

View File

@ -17,8 +17,31 @@ Python_add_library(demStitch MODULE
demstitcher/bindings/demStitch.c
)
Python_add_library(upsampledem MODULE
upsampledem/bindings/upsampledemmodule.cpp
upsampledem/src/upsampledem.f
upsampledem/src/upsampledemSetState.f
upsampledem/src/upsampledemState.f
)
target_include_directories(upsampledem PRIVATE
upsampledem/include
)
target_link_libraries(upsampledem PRIVATE
utilLib
)
Python_add_library(watermask MODULE
watermask/bindings/watermaskmodule.cpp
watermask/src/watermask.cpp
)
target_include_directories(watermask PRIVATE
watermask/include
)
InstallSameDir(
demStitch
upsampledem
watermask
correct_geoid_i2_srtm
__init__.py
correct_geoid_i2_srtm/Correct_geoid_i2_srtm.py
@ -27,6 +50,5 @@ InstallSameDir(
demstitcher/DemStitcherV3.py
swbdstitcher/SWBDStitcher.py
upsampledem/UpsampleDem.py
watermask/test/mask.py
watermask/WaterMask.py
)

View File

@ -8,12 +8,19 @@ add_library(issi SHARED
src/polcal.c
src/tecToPhase.c
)
set_target_properties(issi PROPERTIES
PREFIX ""
OUTPUT_NAME issi
SUFFIX .so)
target_include_directories(issi PUBLIC include)
add_subdirectory(src)
InstallSameDir(
issi
__init__.py
FR.py
)
file(RELATIVE_PATH relpath ${isce2_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR})
install(DIRECTORY src/igrf_data
DESTINATION ${ISCE2_PKG}/${relpath}
)

View File

@ -1 +0,0 @@
add_subdirectory(igrf_data)

View File

@ -1,18 +0,0 @@
InstallSameDir(
dgrf00.dat
dgrf45.dat
dgrf50.dat
dgrf55.dat
dgrf60.dat
dgrf65.dat
dgrf70.dat
dgrf75.dat
dgrf80.dat
dgrf85.dat
dgrf90.dat
dgrf95.dat
igrf05.dat
igrf05full.dat
igrf05s.dat
igrf10.dat
)

View File

@ -3,8 +3,8 @@
# Author: Heresh Fattahi
# Copyright 2016
#
import os
import os
import argparse
import numpy as np
from scipy import ndimage
@ -32,7 +32,14 @@ GDAL2NUMPY_DATATYPE = {
7 : np.float64,
10: np.complex64,
11: np.complex128,
}
}
EXAMPLE = '''example:
MaskAndFilter.py -d offset.bip -s offset_snr.bip
MaskAndFilter.py -d offset.bip -s offset_snr.bip --plot
'''
EXAMPLE = '''example:
@ -42,10 +49,10 @@ EXAMPLE = '''example:
def createParser():
'''
'''
Command line parser.
'''
parser = argparse.ArgumentParser(description='Mask and filter the densOffset',
formatter_class=argparse.RawTextHelpFormatter,
epilog=EXAMPLE)
@ -88,7 +95,7 @@ def cmdLineParse(iargs = None):
def read(file, processor='ISCE', bands=None, dataType=None):
''' reader based on GDAL.
Args:
* file -> File name to be read
@ -96,7 +103,7 @@ def read(file, processor='ISCE', bands=None, dataType=None):
Kwargs:
* processor -> the processor used for the InSAR processing. default: ISCE
* bands -> a list of bands to be extracted. If not specified all bands will be extracted.
* bands -> a list of bands to be extracted. If not specified all bands will be extracted.
* dataType -> if not specified, it will be extracted from the data itself
Returns:
* data : A numpy array with dimensions : number_of_bands * length * width
@ -116,7 +123,7 @@ def read(file, processor='ISCE', bands=None, dataType=None):
if bands is None:
bands = range(1,dataset.RasterCount+1)
######################################
# if dataType is not known let's get it from the data:
# if dataType is not known let's get it from the data:
if dataType is None:
band = dataset.GetRasterBand(1)
dataType = GDAL2NUMPY_DATATYPE[band.DataType]
@ -149,13 +156,13 @@ def fill(data, invalid=None):
"""
Replace the value of invalid 'data' cells (indicated by 'invalid')
by the value of the nearest valid data cell
Input:
data: numpy array of any dimension
invalid: a binary array of same shape as 'data'.
data value are replaced where invalid is True
If None (default), use: invalid = np.isnan(data)
Output:
Return a filled array.
"""
@ -171,7 +178,7 @@ def mask_filter(inps, band, outName):
"""masking and Filtering"""
# read offset
offset = read(inps.denseOffset, bands=band)
offset = read(inps.denseOffset, bands=band)
offset = offset[0,:,:]
# read SNR
@ -184,17 +191,21 @@ def mask_filter(inps, band, outName):
offset1 = np.array(offset)
offset1[snr < inps.snrThreshold] = np.nan
# percentage of masked out pixels among all non-zero SNR pixels
perc = np.sum(snr >= inps.snrThreshold) / np.sum(snr > 0)
print('percentage of pixels with SNR >= {} among pixels with SNR > 0: {:.0%}'.format(inps.snrThreshold, perc))
# fill the hole in offset with nearest data
print('fill the masked out region with nearest data')
offset2 = fill(offset1)
# median filtering
print('filtering with median filter with size : ', inps.filterSize)
print('filtering with median filter with size: {}'.format(inps.filterSize))
offset3 = ndimage.median_filter(offset2, size=inps.filterSize)
length, width = offset3.shape
# write data to file
print('writing masked and filtered offsets to: ', outName)
print('writing masked and filtered offsets to: {}'.format(outName))
write(offset3, outName, 1, 6)
# write the xml/vrt/hdr file
@ -216,33 +227,55 @@ def mask_filter(inps, band, outName):
def plot_mask_and_filtering(az_list, rg_list, inps=None):
print('-'*30)
print('plotting mask and filtering result ...')
print('mask pixels with SNR == 0 (for plotting ONLY; data files are untouched)')
snr = az_list[0]
for i in range(1, len(az_list)):
az_list[i][snr == 0] = np.nan
rg_list[i][snr == 0] = np.nan
# percentage of masked out pixels among all non-zero SNR pixels
perc = np.sum(snr >= inps.snrThreshold) / np.sum(snr > 0)
print('percentage of pixels with SNR >= {} among pixels with SNR > 0: {:.0%}'.format(inps.snrThreshold, perc))
fig, axs = plt.subplots(nrows=2, ncols=5, figsize=inps.figsize, sharex=True, sharey=True)
titles = ['SNR', 'offset', 'offset (mask)', 'offset (mask/fill)', 'offset (mask/fill/filter)']
titles = ['SNR',
'offset',
'offset (mask {} - {:.0%} remain)'.format(inps.snrThreshold, perc),
'offset (mask {} / fill)'.format(inps.snrThreshold),
'offset (mask {} / fill / filter {})'.format(inps.snrThreshold, inps.filterSize)]
# plot SNR
im0 = axs[0,0].imshow(az_list[0], vmin=inps.vlim_snr[0], vmax=inps.vlim_snr[1], cmap='RdBu')
im0 = axs[1,0].imshow(rg_list[0], vmin=inps.vlim_snr[0], vmax=inps.vlim_snr[1], cmap='RdBu')
kwargs = dict(vmin=inps.vlim_snr[0], vmax=inps.vlim_snr[1], cmap='RdBu', interpolation='nearest')
im0 = axs[0,0].imshow(snr, **kwargs)
im0 = axs[1,0].imshow(snr, **kwargs)
axs[0,0].set_title('SNR', fontsize=12)
print('SNR data range: [{}, {}]'.format(np.nanmin(snr), np.nanmax(snr)))
# label
axs[0,0].set_ylabel('azimuth', fontsize=12)
axs[1,0].set_ylabel('range', fontsize=12)
# plot offset
kwargs = dict(vmin=inps.vlim[0], vmax=inps.vlim[1], cmap='jet', interpolation='nearest')
for i in range(1,len(az_list)):
im1 = axs[0,i].imshow(az_list[i], vmin=inps.vlim[0], vmax=inps.vlim[1], cmap='jet')
im1 = axs[1,i].imshow(rg_list[i], vmin=inps.vlim[0], vmax=inps.vlim[1], cmap='jet')
im1 = axs[0,i].imshow(az_list[i], **kwargs)
im1 = axs[1,i].imshow(rg_list[i], **kwargs)
axs[0,i].set_title(titles[i], fontsize=12)
print('{} data range'.format(titles[i]))
print('azimuth offset: [{:.3f}, {:.3f}]'.format(np.nanmin(az_list[i]), np.nanmax(az_list[i])))
print('range offset: [{:.3f}, {:.3f}]'.format(np.nanmin(rg_list[i]), np.nanmax(rg_list[i])))
fig.tight_layout()
# colorbar
fig.subplots_adjust(bottom=0.15)
cax0 = fig.add_axes([0.09, 0.1, 0.08, 0.015])
cax0 = fig.add_axes([0.08, 0.1, 0.08, 0.015])
cbar0 = plt.colorbar(im0, cax=cax0, orientation='horizontal')
cax0.yaxis.set_ticks_position('left')
#fig.subplots_adjust(right=0.93)
cax1 = fig.add_axes([0.57, 0.1, 0.08, 0.015])
cax1 = fig.add_axes([0.60, 0.1, 0.15, 0.015])
cbar1 = plt.colorbar(im1, cax=cax1, orientation='horizontal')
cbar1.set_label('pixel', fontsize=12)
@ -259,7 +292,7 @@ def main(iargs=None):
inps = cmdLineParse(iargs)
os.makedirs(inps.outD, exist_ok=True)
os.makedirs(inps.outDir, exist_ok=True)
#######################
# masking the dense offsets based on SNR and median filter the masked offs
@ -283,4 +316,3 @@ if __name__ == '__main__':
Main driver.
'''
main()

View File

@ -11,13 +11,13 @@ import isceobj
from iscesys.Component.ProductManager import ProductManager as PM
import numpy as np
from netCDF4 import Dataset
from mpl_toolkits.basemap import Basemap
import gdal
from scipy.interpolate import interp2d, griddata
import matplotlib.pyplot as plt
class grossOffsets:
def __init__(self):
@ -116,6 +116,7 @@ class grossOffsets:
#x,y = np.meshgrid(self.x0,self.y0)
#from mpl_toolkits.basemap import Basemap
#self.AntVeloDataMap = Basemap(width=5600000,height=5600000,\
# resolution='l',projection='stere',\
# lat_ts=-71,lat_0=-90,lon_0=0)

3
defaults/logging/logging.conf Normal file → Executable file
View File

@ -21,8 +21,7 @@ propagate=0
class=handlers.RotatingFileHandler
formatter=simpleFormatter
# Filename, file mode, maximum file size in bytes,number of backups to keep
# encoding, delay
args=('isce.log','a',1048576,5,None,True)
args=('isce.log','a',1000048576,5)
[handler_consoleHandler]
class=StreamHandler

View File

@ -1,4 +1,5 @@
Python_add_library(isceLib MODULE
cython_add_module(isceLib
pyx/isceLib.pyx
src/Ellipsoid.cpp
src/LinAlg.cpp
src/Orbit.cpp