更新20250116
parent
e69457e3df
commit
a56a208c46
|
|
@ -0,0 +1,257 @@
|
|||
version: 2.1
|
||||
jobs:
|
||||
test-cmake:
|
||||
docker:
|
||||
- image: ubuntu:20.04
|
||||
steps:
|
||||
- checkout:
|
||||
path: /root/project/src
|
||||
|
||||
- run:
|
||||
name: Install ISCE requirements
|
||||
command: |
|
||||
set -ex
|
||||
pwd
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
apt-get update
|
||||
apt-get install -y cmake cython3 git libfftw3-dev libgdal-dev libhdf4-alt-dev libhdf5-dev libopencv-dev python3-gdal python3-h5py python3-numpy python3-scipy
|
||||
|
||||
- run:
|
||||
name: Build and Install ISCE
|
||||
command: |
|
||||
set -ex
|
||||
cd /root/project/src
|
||||
mkdir build
|
||||
cd build
|
||||
MODPATH=$(python3 -c "import site; print(site.getsitepackages()[-1])")
|
||||
cmake .. -DCMAKE_INSTALL_PREFIX=install -DPYTHON_MODULE_DIR=$MODPATH
|
||||
make install VERBOSE=y
|
||||
|
||||
- run:
|
||||
name: Test ISCE installation
|
||||
command: |
|
||||
set -ex
|
||||
cd /root/project/src/build
|
||||
ctest --output-on-failure
|
||||
ISCE2DIR=$(python3 -c "import os, isce2; print(os.path.dirname(isce2.__file__))" | tail -n 1)
|
||||
export PATH=$ISCE2DIR/applications:$PATH
|
||||
topsApp.py --help --steps
|
||||
stripmapApp.py --help --steps
|
||||
python3 -c "import isce"
|
||||
# Create dummy ref/secondary configs for topsApp
|
||||
ln -s ../examples/input_files/reference_TOPS_SENTINEL1.xml reference.xml
|
||||
ln -s reference.xml secondary.xml
|
||||
topsApp.py --steps --end=preprocess ../examples/input_files/topsApp.xml
|
||||
|
||||
test:
|
||||
docker:
|
||||
- image: ubuntu:20.04
|
||||
steps:
|
||||
- checkout:
|
||||
path: /root/project/src
|
||||
|
||||
- run:
|
||||
name: Install ISCE requirements
|
||||
command: |
|
||||
set -ex
|
||||
pwd
|
||||
mkdir config build install
|
||||
export DEBIAN_FRONTEND=noninteractive
|
||||
apt-get update
|
||||
apt-get install -y scons cython3 git libfftw3-dev libgdal-dev libhdf4-alt-dev libhdf5-dev libmotif-dev libopencv-dev libx11-dev python3-gdal python3-h5py python3-numpy python3-scipy
|
||||
|
||||
- run:
|
||||
name: Build SConfigISCE and setup dirs
|
||||
command: |
|
||||
set -ex
|
||||
pwd
|
||||
cd config
|
||||
echo "PRJ_SCONS_BUILD = /root/project/build" > SConfigISCE
|
||||
echo "PRJ_SCONS_INSTALL = /root/project/install/isce" >> SConfigISCE
|
||||
echo "LIBPATH = /usr/lib64 /usr/lib /usr/lib/x86_64-linux-gnu" >> SConfigISCE
|
||||
python_inc="/usr/include/python3.8 /usr/lib/python3/dist-packages/numpy/core/include"
|
||||
echo "CPPPATH = $python_inc /usr/include /usr/include/gdal /usr/include/opencv4" >> SConfigISCE
|
||||
echo "FORTRANPATH = /usr/include" >> SConfigISCE
|
||||
echo "FORTRAN = /bin/gfortran" >> SConfigISCE
|
||||
echo "CC = /bin/gcc" >> SConfigISCE
|
||||
echo "CXX = /bin/g++" >> SConfigISCE
|
||||
echo "MOTIFLIBPATH = /usr/lib64" >> SConfigISCE
|
||||
echo "X11LIBPATH = /usr/lib64" >> SConfigISCE
|
||||
echo "MOTIFINCPATH = /usr/include" >> SConfigISCE
|
||||
echo "X11INCPATH = /usr/include" >> SConfigISCE
|
||||
echo "RPATH = /usr/lib64 /usr/lib" >> SConfigISCE
|
||||
cat SConfigISCE
|
||||
|
||||
- run:
|
||||
name: Build and Install ISCE
|
||||
command: |
|
||||
set -ex
|
||||
pwd
|
||||
cd src
|
||||
SCONS_CONFIG_DIR=/root/project/config scons install --skipcheck
|
||||
|
||||
- run:
|
||||
name: Test ISCE installation
|
||||
command: |
|
||||
set -ex
|
||||
pwd
|
||||
ISCE_HOME=/root/project/install/isce
|
||||
export PATH="$ISCE_HOME/bin:$ISCE_HOME/applications:$PATH"
|
||||
export PYTHONPATH="/root/project/install:$PYTHONPATH"
|
||||
topsApp.py --help --steps
|
||||
stripmapApp.py --help --steps
|
||||
python3 -c "import isce"
|
||||
python3 -c "import isce; from isceobj.Sensor import SENSORS as s; [s[k]() for k in s]"
|
||||
build:
|
||||
docker:
|
||||
- image: docker:stable-git
|
||||
steps:
|
||||
- checkout
|
||||
- setup_remote_docker
|
||||
- run:
|
||||
name: Install dependencies
|
||||
command: |
|
||||
apk add --no-cache \
|
||||
python3-dev py3-pip bash pigz build-base libffi-dev openssl-dev \
|
||||
docker-compose aws-cli
|
||||
- run:
|
||||
name: Build docker image
|
||||
command: |
|
||||
mkdir images
|
||||
SHA1=$(echo $CIRCLE_SHA1 | cut -c1-7)
|
||||
echo "export TAG=$SHA1" >> images/env.sh
|
||||
source images/env.sh
|
||||
docker build --rm --force-rm -t isce/isce2:$TAG -f docker/Dockerfile .
|
||||
cd images
|
||||
docker save isce/isce2:$TAG > isce2.tar
|
||||
- persist_to_workspace:
|
||||
root: images
|
||||
paths:
|
||||
- "*"
|
||||
build-release:
|
||||
docker:
|
||||
- image: docker:stable-git
|
||||
steps:
|
||||
- checkout
|
||||
- setup_remote_docker
|
||||
- run:
|
||||
name: Install dependencies
|
||||
command: |
|
||||
apk add --no-cache \
|
||||
python3-dev py3-pip bash pigz build-base libffi-dev openssl-dev \
|
||||
docker-compose aws-cli
|
||||
- run:
|
||||
name: Build docker image
|
||||
command: |
|
||||
mkdir images
|
||||
echo "export TAG=$CIRCLE_TAG" >> images/env.sh
|
||||
source images/env.sh
|
||||
docker build --rm --force-rm -t isce/isce2:$TAG -f docker/Dockerfile .
|
||||
cd images
|
||||
docker save isce/isce2:$TAG > isce2.tar
|
||||
- persist_to_workspace:
|
||||
root: images
|
||||
paths:
|
||||
- "*"
|
||||
build-periodically:
|
||||
docker:
|
||||
- image: docker:stable-git
|
||||
steps:
|
||||
- checkout
|
||||
- setup_remote_docker
|
||||
- run:
|
||||
name: Install dependencies
|
||||
command: |
|
||||
apk add --no-cache \
|
||||
python3-dev py3-pip bash pigz build-base libffi-dev openssl-dev \
|
||||
docker-compose aws-cli
|
||||
- run:
|
||||
name: Build docker image
|
||||
command: |
|
||||
mkdir images
|
||||
echo 'export TAG=$(date -u +%Y%m%d)' >> images/env.sh
|
||||
source images/env.sh
|
||||
docker build --rm --force-rm -t isce/isce2:$TAG -f docker/Dockerfile .
|
||||
cd images
|
||||
docker save isce/isce2:$TAG > isce2.tar
|
||||
- persist_to_workspace:
|
||||
root: images
|
||||
paths:
|
||||
- "*"
|
||||
deploy:
|
||||
docker:
|
||||
- image: docker:stable-git
|
||||
steps:
|
||||
- setup_remote_docker
|
||||
- run:
|
||||
name: Install dependencies
|
||||
command: |
|
||||
apk add --no-cache \
|
||||
curl file
|
||||
- attach_workspace:
|
||||
at: images
|
||||
- run:
|
||||
name: Deploy
|
||||
command: |
|
||||
cd images
|
||||
source env.sh
|
||||
docker load -i isce2.tar
|
||||
docker tag isce/isce2:$TAG isce/isce2:latest
|
||||
docker login -u $DOCKER_USER -p $DOCKER_PASS
|
||||
docker push isce/isce2:$TAG
|
||||
docker push isce/isce2:latest
|
||||
|
||||
workflows:
|
||||
version: 2
|
||||
test:
|
||||
jobs:
|
||||
- test
|
||||
- test-cmake
|
||||
- build
|
||||
build-deploy:
|
||||
jobs:
|
||||
- build:
|
||||
filters:
|
||||
branches:
|
||||
only: main
|
||||
- deploy:
|
||||
requires:
|
||||
- build
|
||||
filters:
|
||||
branches:
|
||||
only: main
|
||||
build-deploy-release:
|
||||
jobs:
|
||||
- build-release:
|
||||
filters:
|
||||
tags:
|
||||
only: /^v.*/
|
||||
branches:
|
||||
ignore: /.*/
|
||||
- deploy:
|
||||
requires:
|
||||
- build-release
|
||||
filters:
|
||||
tags:
|
||||
only: /^v.*/
|
||||
branches:
|
||||
ignore: /.*/
|
||||
weekly:
|
||||
triggers:
|
||||
- schedule:
|
||||
cron: "0 7 * * 0"
|
||||
filters:
|
||||
branches:
|
||||
only:
|
||||
- main
|
||||
jobs:
|
||||
- build-periodically:
|
||||
filters:
|
||||
branches:
|
||||
only: main
|
||||
- deploy:
|
||||
requires:
|
||||
- build-periodically
|
||||
filters:
|
||||
branches:
|
||||
only: main
|
||||
|
|
@ -0,0 +1,14 @@
|
|||
# Tries to run Cython using `python -m cython`
|
||||
execute_process(COMMAND ${Python_EXECUTABLE} -m cython --help
|
||||
RESULT_VARIABLE cython_status
|
||||
ERROR_QUIET OUTPUT_QUIET)
|
||||
|
||||
if(NOT cython_status)
|
||||
set(CYTHON_EXECUTABLE ${Python_EXECUTABLE} -m cython CACHE STRING
|
||||
"Cython executable")
|
||||
endif()
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
FIND_PACKAGE_HANDLE_STANDARD_ARGS(Cython REQUIRED_VARS CYTHON_EXECUTABLE)
|
||||
|
||||
mark_as_advanced(CYTHON_EXECUTABLE)
|
||||
|
|
@ -0,0 +1,167 @@
|
|||
#[[
|
||||
Usage:
|
||||
find_package(FFTW [REQUIRED] [QUIET] [COMPONENTS ...])
|
||||
|
||||
Be warned that this will only search for FFTW3 libraries.
|
||||
|
||||
It sets the following variables:
|
||||
FFTW_FOUND .. true if FFTW is found on the system
|
||||
FFTW_[component]_LIB_FOUND .. true if the component is found (see below)
|
||||
FFTW_LIBRARIES .. full paths to all found FFTW libraries
|
||||
FFTW_[component]_LIB .. full path to one component (see below)
|
||||
FFTW_INCLUDE_DIRS .. FFTW include directory paths
|
||||
|
||||
The following variables will be checked by the function
|
||||
FFTW_USE_STATIC_LIBS .. if true, only static libraries are searched
|
||||
FFTW_ROOT .. if set, search under this path first
|
||||
|
||||
Paths will be searched in the following order:
|
||||
FFTW_ROOT (if provided)
|
||||
PkgConfig paths (if found)
|
||||
Library/include installation directories
|
||||
Default find_* paths
|
||||
|
||||
The following component library locations will be defined (if found):
|
||||
FFTW_FLOAT_LIB
|
||||
FFTW_DOUBLE_LIB
|
||||
FFTW_LONGDOUBLE_LIB
|
||||
FFTW_FLOAT_THREADS_LIB
|
||||
FFTW_DOUBLE_THREADS_LIB
|
||||
FFTW_LONGDOUBLE_THREADS_LIB
|
||||
FFTW_FLOAT_OMP_LIB
|
||||
FFTW_DOUBLE_OMP_LIB
|
||||
FFTW_LONGDOUBLE_OMP_LIB
|
||||
|
||||
The following IMPORTED targets will be created (if found):
|
||||
FFTW::Float
|
||||
FFTW::Double
|
||||
FFTW::LongDouble
|
||||
FFTW::FloatThreads
|
||||
FFTW::DoubleThreads
|
||||
FFTW::LongDoubleThreads
|
||||
FFTW::FloatOMP
|
||||
FFTW::DoubleOMP
|
||||
FFTW::LongDoubleOMP
|
||||
]]
|
||||
|
||||
include(FindPackageHandleStandardArgs)
|
||||
|
||||
if(NOT FFTW_ROOT AND DEFINED ENV{FFTWDIR})
|
||||
set(FFTW_ROOT $ENV{FFTWDIR})
|
||||
endif()
|
||||
|
||||
# Check if we can use PkgConfig
|
||||
find_package(PkgConfig)
|
||||
|
||||
# Determine from PKG
|
||||
if(PKG_CONFIG_FOUND)
|
||||
pkg_check_modules(PKG_FFTW QUIET fftw3)
|
||||
endif()
|
||||
|
||||
# Check whether to search static or dynamic libs
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES_SAV ${CMAKE_FIND_LIBRARY_SUFFIXES})
|
||||
|
||||
if(${FFTW_USE_STATIC_LIBS})
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_STATIC_LIBRARY_SUFFIX})
|
||||
else()
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_SAV})
|
||||
endif()
|
||||
|
||||
# Paths to pass to find_library for each component
|
||||
set(findlib_paths
|
||||
${FFTW_ROOT}
|
||||
${PKG_FFTW_LIBRARY_DIRS}
|
||||
${LIB_INSTALL_DIR}
|
||||
)
|
||||
|
||||
# Find include directory
|
||||
find_path(FFTW_INCLUDE_DIRS
|
||||
NAMES fftw3.h
|
||||
PATHS ${FFTW_ROOT}
|
||||
${PKG_FFTW_INCLUDE_DIRS}
|
||||
${INCLUDE_INSTALL_DIR}
|
||||
PATH_SUFFIXES include
|
||||
)
|
||||
|
||||
set(FFTW_LIBRARIES "")
|
||||
|
||||
foreach(dtype Float Double LongDouble)
|
||||
|
||||
# Single-letter suffix for the library name
|
||||
string(REGEX REPLACE "(.).*" "\\1" letter ${dtype})
|
||||
string(TOLOWER ${letter} letter)
|
||||
# The double-precision library doesn't use a suffix
|
||||
if("${letter}" STREQUAL "d")
|
||||
set(letter "")
|
||||
endif()
|
||||
|
||||
foreach(system "" Threads OMP)
|
||||
|
||||
# CamelCase component name used for interface libraries
|
||||
# e.g. FloatThreads
|
||||
set(component ${dtype}${system})
|
||||
|
||||
# Component library location variable used via find_library
|
||||
# e.g. FFTW_DOUBLE_THREADS_LIB
|
||||
if(system)
|
||||
set(libvar FFTW_${dtype}_${system}_LIB)
|
||||
else()
|
||||
set(libvar FFTW_${dtype}_LIB)
|
||||
endif()
|
||||
string(TOUPPER ${libvar} libvar)
|
||||
|
||||
# Filename root common to all libraries
|
||||
set(libname fftw3${letter})
|
||||
if(system)
|
||||
string(TOLOWER ${system} systemlower)
|
||||
set(libname ${libname}_${systemlower})
|
||||
endif()
|
||||
# Actual filenames looked for by find_library
|
||||
set(libnames
|
||||
${libname}
|
||||
lib${libname}3-3
|
||||
)
|
||||
|
||||
find_library(
|
||||
${libvar}
|
||||
NAMES ${libnames}
|
||||
PATHS ${findlib_paths}
|
||||
PATH_SUFFIXES lib lib64
|
||||
)
|
||||
|
||||
# Tell find_package whether this component was found
|
||||
set(FFTW_${component}_FIND_QUIETLY TRUE)
|
||||
# Also set the value of the legacy library-variable
|
||||
# (Will be set to *-NOTFOUND if not found)
|
||||
set(${libvar} ${FFTW_${component}})
|
||||
|
||||
# If the library was found:
|
||||
if(${libvar} AND NOT TARGET FFTW::${component})
|
||||
# Add it to the list of FFTW libraries
|
||||
list(APPEND FFTW_LIBRARIES ${${libvar}})
|
||||
|
||||
# Create a corresponding interface library
|
||||
add_library(FFTW::${component} IMPORTED INTERFACE)
|
||||
target_include_directories(
|
||||
FFTW::${component} SYSTEM INTERFACE ${FFTW_INCLUDE_DIRS})
|
||||
target_link_libraries(
|
||||
FFTW::${component} INTERFACE ${${libvar}})
|
||||
endif()
|
||||
|
||||
mark_as_advanced(${libvar})
|
||||
|
||||
endforeach()
|
||||
endforeach()
|
||||
|
||||
# Restore saved find_library suffixes
|
||||
set(CMAKE_FIND_LIBRARY_SUFFIXES ${CMAKE_FIND_LIBRARY_SUFFIXES_SAV})
|
||||
|
||||
find_package_handle_standard_args(FFTW
|
||||
REQUIRED_VARS FFTW_LIBRARIES FFTW_INCLUDE_DIRS
|
||||
HANDLE_COMPONENTS
|
||||
)
|
||||
|
||||
mark_as_advanced(
|
||||
FFTW_INCLUDE_DIRS
|
||||
FFTW_LIBRARIES
|
||||
)
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
find_package(GDAL)
|
||||
|
||||
# Make a compatibility GDAL::GDAL interface target
|
||||
# In CMake >= 3.14, this already exists for us :)
|
||||
if(GDAL_FOUND AND NOT TARGET GDAL::GDAL)
|
||||
add_library(GDAL::GDAL IMPORTED INTERFACE)
|
||||
target_include_directories(GDAL::GDAL SYSTEM INTERFACE ${GDAL_INCLUDE_DIRS})
|
||||
target_link_libraries(GDAL::GDAL INTERFACE ${GDAL_LIBRARIES})
|
||||
endif()
|
||||
|
|
@ -0,0 +1,9 @@
|
|||
find_package(Motif)
|
||||
|
||||
if(MOTIF_FOUND AND NOT TARGET Motif::Motif)
|
||||
add_library(Motif::Motif IMPORTED INTERFACE)
|
||||
target_include_directories(Motif::Motif
|
||||
SYSTEM INTERFACE ${MOTIF_INCLUDE_DIR})
|
||||
target_link_libraries(Motif::Motif
|
||||
INTERFACE ${MOTIF_LIBRARIES})
|
||||
endif()
|
||||
|
|
@ -0,0 +1,25 @@
|
|||
set(components
|
||||
Xau
|
||||
Xt
|
||||
)
|
||||
|
||||
find_package(X11 COMPONENTS ${components})
|
||||
|
||||
if(X11_FOUND)
|
||||
|
||||
# make X11 look like a regular find_package component
|
||||
set(X11_X11_FOUND TRUE)
|
||||
set(X11_X11_INCLUDE_PATH ${X11_INCLUDE_DIR})
|
||||
list(APPEND components X11)
|
||||
|
||||
foreach(component ${components})
|
||||
if(X11_${component}_FOUND AND
|
||||
NOT TARGET X11::${component})
|
||||
add_library(X11::${component} IMPORTED INTERFACE)
|
||||
target_link_libraries(X11::${component}
|
||||
INTERFACE ${X11_${component}_LIB})
|
||||
target_include_directories(X11::${component} SYSTEM
|
||||
INTERFACE ${X11_${component}_INCLUDE_PATH})
|
||||
endif()
|
||||
endforeach()
|
||||
endif()
|
||||
|
|
@ -0,0 +1,142 @@
|
|||
# Define a function to create Cython modules.
|
||||
#
|
||||
# For more information on the Cython project, see http://cython.org/.
|
||||
# "Cython is a language that makes writing C extensions for the Python language
|
||||
# as easy as Python itself."
|
||||
#
|
||||
# This file defines a CMake function to build a Cython Python module.
|
||||
# To use it, first include this file.
|
||||
#
|
||||
# include(UseCython)
|
||||
#
|
||||
# Then call cython_add_module to create a module.
|
||||
#
|
||||
# cython_add_module(<module_name> <src1> <src2> ... <srcN>)
|
||||
#
|
||||
# Where <module_name> is the name of the resulting Python module and
|
||||
# <src1> <src2> ... are source files to be compiled into the module, e.g. *.pyx,
|
||||
# *.py, *.cxx, etc. A CMake target is created with name <module_name>. This can
|
||||
# be used for target_link_libraries(), etc.
|
||||
#
|
||||
# The sample paths set with the CMake include_directories() command will be used
|
||||
# for include directories to search for *.pxd when running the Cython complire.
|
||||
#
|
||||
# Cache variables that effect the behavior include:
|
||||
#
|
||||
# CYTHON_ANNOTATE
|
||||
# CYTHON_NO_DOCSTRINGS
|
||||
# CYTHON_FLAGS
|
||||
#
|
||||
# See also FindCython.cmake
|
||||
|
||||
#=============================================================================
|
||||
# Copyright 2011 Kitware, Inc.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#=============================================================================
|
||||
|
||||
# Configuration options.
|
||||
set( CYTHON_ANNOTATE OFF
|
||||
CACHE BOOL "Create an annotated .html file when compiling *.pyx." )
|
||||
set( CYTHON_NO_DOCSTRINGS OFF
|
||||
CACHE BOOL "Strip docstrings from the compiled module." )
|
||||
set( CYTHON_FLAGS "" CACHE STRING
|
||||
"Extra flags to the cython compiler." )
|
||||
mark_as_advanced( CYTHON_ANNOTATE CYTHON_NO_DOCSTRINGS CYTHON_FLAGS )
|
||||
|
||||
find_package(Cython REQUIRED)
|
||||
find_package(Python REQUIRED COMPONENTS Development)
|
||||
|
||||
# Check the version of Cython
|
||||
execute_process( COMMAND ${CYTHON_EXECUTABLE} --version
|
||||
OUTPUT_VARIABLE CYTHON_VERSION ERROR_VARIABLE CYTHON_VERSION )
|
||||
string(REGEX MATCH "([0-9]|\\.)+" CYTHON_VERSION ${CYTHON_VERSION})
|
||||
if((CYTHON_VERSION VERSION_GREATER_EQUAL 0.28.1))
|
||||
message(STATUS "Found Cython: ${CYTHON_VERSION}")
|
||||
else()
|
||||
message(FATAL_ERROR "Could not find Cython version >= 0.28.1")
|
||||
endif()
|
||||
|
||||
# Create a *.cxx file from a *.pyx file.
|
||||
# Input the generated file basename. The generate file will put into the variable
|
||||
# placed in the "generated_file" argument. Finally all the *.py and *.pyx files.
|
||||
function( compile_pyx _name generated_file )
|
||||
|
||||
set( pyx_locations "" )
|
||||
|
||||
foreach( pyx_file ${ARGN} )
|
||||
# Get the include directories.
|
||||
get_source_file_property( pyx_location ${pyx_file} LOCATION )
|
||||
get_filename_component( pyx_path ${pyx_location} PATH )
|
||||
list( APPEND pyx_locations "${pyx_location}" )
|
||||
endforeach() # pyx_file
|
||||
|
||||
# Set additional flags.
|
||||
set(cython_args "")
|
||||
if( CYTHON_ANNOTATE )
|
||||
list(APPEND cython_args "--annotate" )
|
||||
endif()
|
||||
|
||||
if( CYTHON_NO_DOCSTRINGS )
|
||||
list(APPEND cython_args "--no-docstrings")
|
||||
endif()
|
||||
|
||||
if("${CMAKE_BUILD_TYPE}" STREQUAL "Debug" OR
|
||||
"${CMAKE_BUILD_TYPE}" STREQUAL "RelWithDebInfo")
|
||||
set(APPEND cython_args "--gdb")
|
||||
endif()
|
||||
|
||||
list(APPEND cython_args "-${Python_VERSION_MAJOR}")
|
||||
|
||||
# Determining generated file name.
|
||||
set(_generated_file ${CMAKE_CURRENT_BINARY_DIR}/${_name}.cxx)
|
||||
set_source_files_properties( ${_generated_file} PROPERTIES GENERATED TRUE )
|
||||
set( ${generated_file} ${_generated_file} PARENT_SCOPE )
|
||||
|
||||
# Add the command to run the compiler.
|
||||
add_custom_command( OUTPUT ${_generated_file}
|
||||
COMMAND ${CYTHON_EXECUTABLE}
|
||||
ARGS --cplus ${cython_args} ${CYTHON_FLAGS}
|
||||
--output-file ${_generated_file} ${pyx_locations}
|
||||
DEPENDS ${pyx_locations}
|
||||
IMPLICIT_DEPENDS CXX
|
||||
COMMENT "Compiling Cython CXX source for ${_name}..."
|
||||
)
|
||||
endfunction()
|
||||
|
||||
# cython_add_module( <name> src1 src2 ... srcN )
|
||||
# Build the Cython Python module.
|
||||
function( cython_add_module _name )
|
||||
set( pyx_module_sources "" )
|
||||
set( other_module_sources "" )
|
||||
foreach( _file ${ARGN} )
|
||||
if( ${_file} MATCHES ".*\\.py[x]?$" )
|
||||
list( APPEND pyx_module_sources ${_file} )
|
||||
else()
|
||||
list( APPEND other_module_sources ${_file} )
|
||||
endif()
|
||||
endforeach()
|
||||
set( CYTHON_FLAGS ${CYTHON_FLAGS} -X embedsignature=True)
|
||||
compile_pyx( ${_name} generated_file ${pyx_module_sources} )
|
||||
Python_add_library( ${_name} MODULE ${generated_file} ${other_module_sources} )
|
||||
if( APPLE )
|
||||
set_target_properties( ${_name} PROPERTIES LINK_FLAGS "-undefined dynamic_lookup" )
|
||||
endif()
|
||||
# ignore overflow warnings caused by Python's implicit conversions
|
||||
set_property( SOURCE ${generated_file}
|
||||
PROPERTY COMPILE_OPTIONS -Wno-overflow APPEND )
|
||||
# ignore Numpy deprecated API warning
|
||||
# ignore warnings for using the #warning extension directive
|
||||
# TODO fix -Wno-cpp for nvcc
|
||||
# target_compile_options( ${_name} PRIVATE -Wno-cpp -Wno-pedantic)
|
||||
endfunction()
|
||||
|
|
@ -0,0 +1,50 @@
|
|||
# TODO (global build flags)
|
||||
# These definitions and compile options are
|
||||
# set globally for convenience.
|
||||
# Perhaps we should apply them only as needed on a
|
||||
# per-target basis, and propagate them via the interface?
|
||||
add_definitions(-DNEEDS_F77_TRANSLATION -DF77EXTERNS_LOWERCASE_TRAILINGBAR)
|
||||
add_compile_options(
|
||||
$<$<COMPILE_LANGUAGE:Fortran>:-ffixed-line-length-none>
|
||||
$<$<COMPILE_LANGUAGE:Fortran>:-ffree-line-length-none>
|
||||
$<$<COMPILE_LANGUAGE:Fortran>:-fno-range-check>
|
||||
$<$<COMPILE_LANGUAGE:Fortran>:-fno-second-underscore>)
|
||||
if(CMAKE_Fortran_COMPILER_ID STREQUAL "GNU" AND
|
||||
CMAKE_Fortran_COMPILER_VERSION VERSION_GREATER_EQUAL 10)
|
||||
add_compile_options(
|
||||
$<$<COMPILE_LANGUAGE:Fortran>:-fallow-argument-mismatch>)
|
||||
endif()
|
||||
|
||||
# Set up build flags for C++ and Fortran.
|
||||
set(CMAKE_CXX_STANDARD 11)
|
||||
set(CMAKE_CXX_STANDARD_REQUIRED y)
|
||||
set(CMAKE_CXX_EXTENSIONS n)
|
||||
|
||||
include(GNUInstallDirs)
|
||||
|
||||
# add automatically determined parts of the RPATH, which point to directories
|
||||
# outside of the build tree, to the install RPATH
|
||||
set(CMAKE_INSTALL_RPATH_USE_LINK_PATH ON)
|
||||
|
||||
# the RPATH to be used when installing, but only if it's not a system directory
|
||||
set(abs_libdir ${CMAKE_INSTALL_PREFIX}/${CMAKE_INSTALL_LIBDIR})
|
||||
list(FIND CMAKE_PLATFORM_IMPLICIT_LINK_DIRECTORIES ${abs_libdir} isSystemDir)
|
||||
if("${isSystemDir}" STREQUAL "-1")
|
||||
list(APPEND CMAKE_INSTALL_RPATH ${abs_libdir})
|
||||
endif()
|
||||
|
||||
option(ISCE2_STRICT_COMPILATION "Enable strict checks during compilation" ON)
|
||||
if(ISCE2_STRICT_COMPILATION)
|
||||
|
||||
# Set -fno-common when supported to catch ODR violations
|
||||
include(CheckCCompilerFlag)
|
||||
check_c_compiler_flag(-fno-common C_FNO_COMMON)
|
||||
if(C_FNO_COMMON)
|
||||
add_compile_options($<$<COMPILE_LANGUAGE:C>:-fno-common>)
|
||||
endif()
|
||||
include(CheckCXXCompilerFlag)
|
||||
check_cxx_compiler_flag(-fno-common CXX_FNO_COMMON)
|
||||
if(CXX_FNO_COMMON)
|
||||
add_compile_options($<$<COMPILE_LANGUAGE:CXX>:-fno-common>)
|
||||
endif()
|
||||
endif()
|
||||
|
|
@ -0,0 +1,99 @@
|
|||
# There are a lot of similarly-built modules in isce2
|
||||
# so we add some helpers here to avoid code duplication.
|
||||
# TODO maybe these helpers should have a unique prefix, e.g. "isce2_"
|
||||
|
||||
# Compute a prefix based on the current project subdir
|
||||
# This disambiguates tests with similar names and
|
||||
# allows better pattern matching using `ctest -R`
|
||||
macro(isce2_get_dir_prefix)
|
||||
file(RELATIVE_PATH dir_prefix ${CMAKE_SOURCE_DIR} ${CMAKE_CURRENT_LIST_DIR})
|
||||
string(REPLACE "/" "." dir_prefix ${dir_prefix})
|
||||
endmacro()
|
||||
|
||||
# Usage: isce2_add_staticlib(name [sources ...])
|
||||
# Creates a SCons-like isce2 intermediate library.
|
||||
# The actual target will also be available via the namespaced isce2:: alias.
|
||||
macro(isce2_add_staticlib name)
|
||||
add_library(${name} STATIC ${ARGN})
|
||||
set_target_properties(${name} PROPERTIES
|
||||
OUTPUT_NAME ${name}
|
||||
POSITION_INDEPENDENT_CODE ON
|
||||
)
|
||||
# add alias matching exported target
|
||||
add_library(isce2::${name} ALIAS ${name})
|
||||
endmacro()
|
||||
|
||||
# Usage: isce2_add_cdll(libname [sources ...])
|
||||
# These libraries are loaded using a hardcoded filename, so this
|
||||
# macro simplifies adding target properties to make that possible.
|
||||
macro(isce2_add_cdll target)
|
||||
add_library(${target} SHARED ${ARGN})
|
||||
set_target_properties(${target} PROPERTIES
|
||||
PREFIX ""
|
||||
OUTPUT_NAME ${target}
|
||||
SUFFIX .so)
|
||||
|
||||
# If we're the root cmake project (e.g. not add_subdirectory):
|
||||
if("${CMAKE_SOURCE_DIR}" STREQUAL "${PROJECT_SOURCE_DIR}")
|
||||
# override this to also test the resulting extension
|
||||
add_test(NAME load_cdll_${target}
|
||||
COMMAND ${Python_EXECUTABLE} -c
|
||||
"from ctypes import cdll; \
|
||||
cdll.LoadLibrary('$<TARGET_FILE:${target}>')"
|
||||
)
|
||||
endif()
|
||||
endmacro()
|
||||
|
||||
# Usage:
|
||||
# add_exe_test(main.cpp helpers.F [additional_source.c ...] )
|
||||
# or
|
||||
# add_exe_test(target_from_add_executable)
|
||||
# The latter form is useful when you need to add dependencies,
|
||||
# since the former mangles the name via dir_prefix.
|
||||
function(add_exe_test testfile)
|
||||
isce2_get_dir_prefix()
|
||||
if(TARGET ${testfile})
|
||||
set(target ${testfile})
|
||||
set(testname ${dir_prefix}.${testfile})
|
||||
else()
|
||||
set(target ${dir_prefix}.${testfile})
|
||||
add_executable(${target} ${testfile} ${ARGN})
|
||||
set(testname ${target})
|
||||
endif()
|
||||
add_test(NAME ${testname} COMMAND ${target})
|
||||
endfunction()
|
||||
|
||||
# Usage:
|
||||
# add_python_test(mytest.py)
|
||||
# This is simpler than add_exe_test since there is no compilation step.
|
||||
# The python file is esecuted directly, using the exit status as the result.
|
||||
function(add_python_test testfile)
|
||||
isce2_get_dir_prefix()
|
||||
set(testname ${dir_prefix}.${testfile})
|
||||
add_test(NAME ${testname} COMMAND
|
||||
${Python_EXECUTABLE} ${CMAKE_CURRENT_LIST_DIR}/${testfile})
|
||||
set_tests_properties(${testname} PROPERTIES
|
||||
ENVIRONMENT PYTHONPATH=${CMAKE_INSTALL_PREFIX}/${PYTHON_MODULE_DIR})
|
||||
endfunction()
|
||||
|
||||
# Computes the relative path from the current binary dir to the base binary
|
||||
# dir, and installs the given files/targets using this relative path with
|
||||
# respect to the python package dir.
|
||||
# This greatly simplifies installation since the source dir structure
|
||||
# primarily mimics the python package directory structure.
|
||||
# Note that it first checks if a provided file is a target,
|
||||
# and if so, installs it as a TARGET instead. Make sure your
|
||||
# filenames and target names don't have any overlap!
|
||||
function(InstallSameDir)
|
||||
foreach(name ${ARGN})
|
||||
if(TARGET ${name})
|
||||
set(installtype TARGETS)
|
||||
else()
|
||||
set(installtype FILES)
|
||||
endif()
|
||||
file(RELATIVE_PATH path ${isce2_BINARY_DIR} ${CMAKE_CURRENT_BINARY_DIR})
|
||||
install(${installtype} ${name}
|
||||
DESTINATION ${ISCE2_PKG}/${path}
|
||||
)
|
||||
endforeach()
|
||||
endfunction()
|
||||
|
|
@ -1,269 +1,11 @@
|
|||
# ---> Python
|
||||
# Byte-compiled / optimized / DLL files
|
||||
__pycache__/
|
||||
*.py[cod]
|
||||
*$py.class
|
||||
|
||||
# C extensions
|
||||
*.so
|
||||
|
||||
# Distribution / packaging
|
||||
.Python
|
||||
build/
|
||||
develop-eggs/
|
||||
dist/
|
||||
downloads/
|
||||
eggs/
|
||||
.eggs/
|
||||
lib/
|
||||
lib64/
|
||||
parts/
|
||||
sdist/
|
||||
var/
|
||||
wheels/
|
||||
share/python-wheels/
|
||||
*.egg-info/
|
||||
.installed.cfg
|
||||
*.egg
|
||||
MANIFEST
|
||||
|
||||
# PyInstaller
|
||||
# Usually these files are written by a python script from a template
|
||||
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
||||
*.manifest
|
||||
*.spec
|
||||
|
||||
# Installer logs
|
||||
pip-log.txt
|
||||
pip-delete-this-directory.txt
|
||||
|
||||
# Unit test / coverage reports
|
||||
htmlcov/
|
||||
.tox/
|
||||
.nox/
|
||||
.coverage
|
||||
.coverage.*
|
||||
.cache
|
||||
nosetests.xml
|
||||
coverage.xml
|
||||
*.cover
|
||||
*.py,cover
|
||||
.hypothesis/
|
||||
.pytest_cache/
|
||||
cover/
|
||||
|
||||
# Translations
|
||||
*.mo
|
||||
*.pot
|
||||
|
||||
# Django stuff:
|
||||
*.log
|
||||
local_settings.py
|
||||
db.sqlite3
|
||||
db.sqlite3-journal
|
||||
|
||||
# Flask stuff:
|
||||
instance/
|
||||
.webassets-cache
|
||||
|
||||
# Scrapy stuff:
|
||||
.scrapy
|
||||
|
||||
# Sphinx documentation
|
||||
docs/_build/
|
||||
|
||||
# PyBuilder
|
||||
.pybuilder/
|
||||
target/
|
||||
|
||||
# Jupyter Notebook
|
||||
*.pyc
|
||||
*~
|
||||
*.swp
|
||||
*.DS_Store
|
||||
__pycache__
|
||||
.sconf_temp
|
||||
.sconsign.dblite
|
||||
config.log
|
||||
insar.log
|
||||
isce.log
|
||||
.ipynb_checkpoints
|
||||
|
||||
# IPython
|
||||
profile_default/
|
||||
ipython_config.py
|
||||
|
||||
# pyenv
|
||||
# For a library or package, you might want to ignore these files since the code is
|
||||
# intended to run in multiple environments; otherwise, check them in:
|
||||
# .python-version
|
||||
|
||||
# pipenv
|
||||
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
|
||||
# However, in case of collaboration, if having platform-specific dependencies or dependencies
|
||||
# having no cross-platform support, pipenv may install dependencies that don't work, or not
|
||||
# install all needed dependencies.
|
||||
#Pipfile.lock
|
||||
|
||||
# poetry
|
||||
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
|
||||
# This is especially recommended for binary packages to ensure reproducibility, and is more
|
||||
# commonly ignored for libraries.
|
||||
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
|
||||
#poetry.lock
|
||||
|
||||
# pdm
|
||||
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
|
||||
#pdm.lock
|
||||
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
|
||||
# in version control.
|
||||
# https://pdm.fming.dev/#use-with-ide
|
||||
.pdm.toml
|
||||
|
||||
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
|
||||
__pypackages__/
|
||||
|
||||
# Celery stuff
|
||||
celerybeat-schedule
|
||||
celerybeat.pid
|
||||
|
||||
# SageMath parsed files
|
||||
*.sage.py
|
||||
|
||||
# Environments
|
||||
.env
|
||||
.venv
|
||||
env/
|
||||
venv/
|
||||
ENV/
|
||||
env.bak/
|
||||
venv.bak/
|
||||
|
||||
# Spyder project settings
|
||||
.spyderproject
|
||||
.spyproject
|
||||
|
||||
# Rope project settings
|
||||
.ropeproject
|
||||
|
||||
# mkdocs documentation
|
||||
/site
|
||||
|
||||
# mypy
|
||||
.mypy_cache/
|
||||
.dmypy.json
|
||||
dmypy.json
|
||||
|
||||
# Pyre type checker
|
||||
.pyre/
|
||||
|
||||
# pytype static type analyzer
|
||||
.pytype/
|
||||
|
||||
# Cython debug symbols
|
||||
cython_debug/
|
||||
|
||||
# PyCharm
|
||||
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
|
||||
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
|
||||
# and can be added to the global gitignore or merged into this file. For a more nuclear
|
||||
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
|
||||
#.idea/
|
||||
|
||||
# ---> CVS
|
||||
/CVS/*
|
||||
**/CVS/*
|
||||
.cvsignore
|
||||
*/.cvsignore
|
||||
|
||||
# ---> C++
|
||||
# Prerequisites
|
||||
*.d
|
||||
|
||||
# Compiled Object files
|
||||
*.slo
|
||||
*.lo
|
||||
*.o
|
||||
*.obj
|
||||
|
||||
# Precompiled Headers
|
||||
*.gch
|
||||
*.pch
|
||||
|
||||
# Compiled Dynamic libraries
|
||||
*.so
|
||||
*.dylib
|
||||
*.dll
|
||||
|
||||
# Fortran module files
|
||||
*.mod
|
||||
*.smod
|
||||
|
||||
# Compiled Static libraries
|
||||
*.lai
|
||||
*.la
|
||||
*.a
|
||||
*.lib
|
||||
|
||||
# Executables
|
||||
*.exe
|
||||
*.out
|
||||
*.app
|
||||
|
||||
# ---> C
|
||||
# Prerequisites
|
||||
*.d
|
||||
|
||||
# Object files
|
||||
*.o
|
||||
*.ko
|
||||
*.obj
|
||||
*.elf
|
||||
|
||||
# Linker output
|
||||
*.ilk
|
||||
*.map
|
||||
*.exp
|
||||
|
||||
# Precompiled Headers
|
||||
*.gch
|
||||
*.pch
|
||||
|
||||
# Libraries
|
||||
*.lib
|
||||
*.a
|
||||
*.la
|
||||
*.lo
|
||||
|
||||
# Shared objects (inc. Windows DLLs)
|
||||
*.dll
|
||||
*.so
|
||||
*.so.*
|
||||
*.dylib
|
||||
|
||||
# Executables
|
||||
*.exe
|
||||
*.out
|
||||
*.app
|
||||
*.i*86
|
||||
*.x86_64
|
||||
*.hex
|
||||
|
||||
# Debug files
|
||||
*.dSYM/
|
||||
*.su
|
||||
*.idb
|
||||
*.pdb
|
||||
|
||||
# Kernel Module Compile Results
|
||||
*.mod*
|
||||
*.cmd
|
||||
.tmp_versions/
|
||||
modules.order
|
||||
Module.symvers
|
||||
Mkfile.old
|
||||
dkms.conf
|
||||
|
||||
# ---> CMake
|
||||
CMakeLists.txt.user
|
||||
CMakeCache.txt
|
||||
CMakeFiles
|
||||
CMakeScripts
|
||||
Testing
|
||||
Makefile
|
||||
cmake_install.cmake
|
||||
install_manifest.txt
|
||||
compile_commands.json
|
||||
CTestTestfile.cmake
|
||||
_deps
|
||||
|
||||
|
|
|
|||
|
|
@ -0,0 +1,108 @@
|
|||
cmake_minimum_required(VERSION 3.13...3.18)
|
||||
|
||||
project(isce2 LANGUAGES C CXX Fortran)
|
||||
|
||||
list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_LIST_DIR}/.cmake)
|
||||
|
||||
include(CheckLanguage)
|
||||
check_language(CUDA)
|
||||
if(CMAKE_CUDA_COMPILER)
|
||||
set(CMAKE_CUDA_STANDARD 11)
|
||||
set(CMAKE_CUDA_STANDARD_REQUIRED TRUE)
|
||||
enable_language(CUDA)
|
||||
find_package(CUDAToolkit) # TODO added in cmake 3.17 - copy this module
|
||||
endif()
|
||||
|
||||
find_package(Python 3.5 REQUIRED COMPONENTS Interpreter Development
|
||||
OPTIONAL_COMPONENTS NumPy)
|
||||
find_package(FFTW REQUIRED)
|
||||
find_package(Motif)
|
||||
find_package(OpenMP REQUIRED COMPONENTS C CXX Fortran)
|
||||
find_package(OpenCV COMPONENTS core highgui imgproc)
|
||||
find_package(pybind11 CONFIG)
|
||||
|
||||
# Find these, and create IMPORTED INTERFACE libraries for them if they exist
|
||||
include(TargetGDAL)
|
||||
include(TargetMotif)
|
||||
include(TargetX11)
|
||||
include(UseCython)
|
||||
|
||||
# If we're the root cmake project (e.g. not add_subdirectory):
|
||||
if("${CMAKE_SOURCE_DIR}" STREQUAL "${CMAKE_CURRENT_LIST_DIR}")
|
||||
# override this to also test the resulting extension
|
||||
function(Python_add_library target)
|
||||
_Python_add_library(${target} ${ARGN})
|
||||
set(name "$<TARGET_PROPERTY:${target},OUTPUT_NAME>")
|
||||
add_test(NAME import_${target}
|
||||
COMMAND ${Python_EXECUTABLE} -c
|
||||
"import $<IF:$<BOOL:${name}>,${name},${target}>"
|
||||
)
|
||||
endfunction()
|
||||
endif()
|
||||
|
||||
if(NOT DEFINED PYTHON_MODULE_DIR)
|
||||
set(PYTHON_MODULE_DIR packages CACHE PATH "Python module directory")
|
||||
endif()
|
||||
if(NOT DEFINED ISCE2_PKG)
|
||||
set(ISCE2_PKG ${PYTHON_MODULE_DIR}/isce2 CACHE PATH
|
||||
"ISCE 2 python package install dir")
|
||||
endif()
|
||||
|
||||
if(IS_ABSOLUTE "${ISCE2_PKG}")
|
||||
set(ISCE2_PKG_FULL "${ISCE2_PKG}")
|
||||
else()
|
||||
set(ISCE2_PKG_FULL "${CMAKE_INSTALL_PREFIX}/${ISCE2_PKG}")
|
||||
endif()
|
||||
|
||||
include(isce2_buildflags)
|
||||
include(isce2_helpers)
|
||||
|
||||
enable_testing()
|
||||
|
||||
add_subdirectory(applications)
|
||||
add_subdirectory(components)
|
||||
add_subdirectory(contrib components/contrib)
|
||||
add_subdirectory(defaults)
|
||||
add_subdirectory(library)
|
||||
add_subdirectory(test)
|
||||
|
||||
InstallSameDir(
|
||||
__init__.py
|
||||
release_history.py
|
||||
)
|
||||
|
||||
file(READ license.py LICENSE_TXT)
|
||||
string(FIND "${LICENSE_TXT}" "stanford_license = None" match)
|
||||
if(${match} EQUAL -1)
|
||||
set(ISCE2_HAVE_LICENSE YES)
|
||||
else()
|
||||
set(ISCE2_HAVE_LICENSE NO)
|
||||
endif()
|
||||
option(ISCE2_WITH_STANFORD "Build Stanford components" ${ISCE2_HAVE_LICENSE})
|
||||
if(ISCE2_WITH_STANFORD)
|
||||
InstallSameDir(license.py)
|
||||
message(STATUS "ISCE2's Stanford-licensed components will be built.")
|
||||
else()
|
||||
message(STATUS "ISCE2's Stanford-licensed components will NOT be built.")
|
||||
endif()
|
||||
|
||||
# We also need to create an empty directory for help
|
||||
install(DIRECTORY DESTINATION ${ISCE2_PKG}/helper)
|
||||
|
||||
# CMake will install a python package named "isce2",
|
||||
# but legacy scripts import it as simply "isce".
|
||||
# Make a symlink isce -> isce2 for compatibility.
|
||||
set(symsrc isce2)
|
||||
if(IS_ABSOLUTE "${PYTHON_MODULE_DIR}")
|
||||
set(symdest "${PYTHON_MODULE_DIR}/isce")
|
||||
else()
|
||||
set(symdest "${CMAKE_INSTALL_PREFIX}/${PYTHON_MODULE_DIR}/isce")
|
||||
endif()
|
||||
install(CODE "execute_process(COMMAND
|
||||
${CMAKE_COMMAND} -E create_symlink ${symsrc} ${symdest})")
|
||||
|
||||
# Enable native packaging using CPack
|
||||
if(NOT CPACK_PACKAGE_CONTACT)
|
||||
set(CPACK_PACKAGE_CONTACT "Ryan Burns <rtburns@jpl.nasa.gov>")
|
||||
endif()
|
||||
include(CPack)
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
# Contributing Guidelines
|
||||
|
||||
This document is inspired by similar instructions document in the GDAL and pygmt repositories.
|
||||
|
||||
These are some of the many ways to contribute to the ISCE project:
|
||||
|
||||
* Submitting bug reports and feature requests
|
||||
* Writing tutorials or jupyter-notebooks
|
||||
* Fixing typos, code and improving documentation
|
||||
* Writing code for everyone to use
|
||||
|
||||
If you get stuck at any point you can create an issue on GitHub (look for the *Issues*
|
||||
tab in the repository) or contact us on the [user forum](http://earthdef.caltech.edu/projects/isce_forum/boards).
|
||||
|
||||
For more information on contributing to open source projects,
|
||||
[GitHub's own guide](https://guides.github.com/activities/contributing-to-open-source/)
|
||||
is a great starting point if you are new to version control.
|
||||
|
||||
|
||||
## Ground Rules
|
||||
|
||||
We realize that we don't have a Continuous Integration (CI) system in place yet (maybe you could start by contributing this). So, please be patient if Pull Requests result in some detailed discussions.
|
||||
|
||||
## Git workflows with ISCE
|
||||
|
||||
This is not a git tutorial or reference manual by any means. This just collects a few best practice for git usage for ISCE development. There are plenty of good resources on YouTube and online to help get started.
|
||||
|
||||
### Commit message
|
||||
|
||||
Indicate a component name, a short description and when relevant, a reference to a issue (with 'fixes #' if it actually fixes it)
|
||||
|
||||
```
|
||||
COMPONENT_NAME: fix bla bla (fixes #1234)
|
||||
|
||||
Details here...
|
||||
```
|
||||
|
||||
### Initiate your work repository
|
||||
|
||||
|
||||
Fork isce-framework/isce from github UI, and then
|
||||
```
|
||||
git clone https://github.com/isce_framework/isce2
|
||||
cd isce2
|
||||
git remote add my_user_name https://github.com/my_user_name/isce2.git
|
||||
```
|
||||
|
||||
### Updating your local main branch against upstream
|
||||
|
||||
```
|
||||
git checkout main
|
||||
git fetch origin
|
||||
# Be careful: this will lose all local changes you might have done now
|
||||
git reset --hard origin/main
|
||||
```
|
||||
|
||||
### Working with a feature branch
|
||||
|
||||
```
|
||||
git checkout main
|
||||
(potentially update your local reference against upstream, as described above)
|
||||
git checkout -b my_new_feature_branch
|
||||
|
||||
# do work. For example:
|
||||
git add my_new_file
|
||||
git add my_modifid_message
|
||||
git rm old_file
|
||||
git commit -a
|
||||
|
||||
# you may need to resynchronize against main if you need some bugfix
|
||||
# or new capability that has been added to main since you created your
|
||||
# branch
|
||||
git fetch origin
|
||||
git rebase origin/main
|
||||
|
||||
# At end of your work, make sure history is reasonable by folding non
|
||||
# significant commits into a consistent set
|
||||
git rebase -i main (use 'fixup' for example to merge several commits together,
|
||||
and 'reword' to modify commit messages)
|
||||
|
||||
# or alternatively, in case there is a big number of commits and marking
|
||||
# all them as 'fixup' is tedious
|
||||
git fetch origin
|
||||
git rebase origin/main
|
||||
git reset --soft origin/main
|
||||
git commit -a -m "Put here the synthetic commit message"
|
||||
|
||||
# push your branch
|
||||
git push my_user_name my_new_feature_branch
|
||||
From GitHub UI, issue a pull request
|
||||
```
|
||||
|
||||
If the pull request discussion results in changes,
|
||||
commit locally and push. To get a reasonable history, you may need to
|
||||
```
|
||||
git rebase -i main
|
||||
```
|
||||
, in which case you will have to force-push your branch with
|
||||
```
|
||||
git push -f my_user_name my_new_feature_branch
|
||||
```
|
||||
|
||||
### Things you should NOT do
|
||||
|
||||
(For anyone with push rights to github.com/isce-framework/isce2) Never modify a commit or
|
||||
the history of anything that has been
|
||||
committed to https://github.com/isce-framework/isce2
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
Copyright 2008 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
|
||||
United States Government Sponsorship acknowledged. This software is subject to
|
||||
U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
(No [Export] License Required except when exporting to an embargoed country,
|
||||
end user, or in support of a prohibited end use). By downloading this software,
|
||||
the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
The user has the responsibility to obtain export licenses, or other export
|
||||
authority as may be required before exporting this software to any 'EAR99'
|
||||
embargoed foreign country or citizen of those countries.
|
||||
|
|
@ -0,0 +1,422 @@
|
|||
<!DOCTYPE html>
|
||||
<html lang="en">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<meta http-equiv="X-UA-Compatible" content="IE=edge">
|
||||
<meta name="viewport" content="width=device-width, initial-scale=1">
|
||||
<meta name="description" content="Home page of The Apache Software Foundation">
|
||||
|
||||
<link rel="apple-touch-icon" sizes="57x57" href="/favicons/apple-touch-icon-57x57.png">
|
||||
<link rel="apple-touch-icon" sizes="60x60" href="/favicons/apple-touch-icon-60x60.png">
|
||||
<link rel="apple-touch-icon" sizes="72x72" href="/favicons/apple-touch-icon-72x72.png">
|
||||
<link rel="apple-touch-icon" sizes="76x76" href="/favicons/apple-touch-icon-76x76.png">
|
||||
<link rel="apple-touch-icon" sizes="114x114" href="/favicons/apple-touch-icon-114x114.png">
|
||||
<link rel="apple-touch-icon" sizes="120x120" href="/favicons/apple-touch-icon-120x120.png">
|
||||
<link rel="apple-touch-icon" sizes="144x144" href="/favicons/apple-touch-icon-144x144.png">
|
||||
<link rel="apple-touch-icon" sizes="152x152" href="/favicons/apple-touch-icon-152x152.png">
|
||||
<link rel="apple-touch-icon" sizes="180x180" href="/favicons/apple-touch-icon-180x180.png">
|
||||
<link rel="icon" type="image/png" href="/favicons/favicon-32x32.png" sizes="32x32">
|
||||
<link rel="icon" type="image/png" href="/favicons/favicon-194x194.png" sizes="194x194">
|
||||
<link rel="icon" type="image/png" href="/favicons/favicon-96x96.png" sizes="96x96">
|
||||
<link rel="icon" type="image/png" href="/favicons/android-chrome-192x192.png" sizes="192x192">
|
||||
<link rel="icon" type="image/png" href="/favicons/favicon-16x16.png" sizes="16x16">
|
||||
<link rel="manifest" href="/favicons/manifest.json">
|
||||
<link rel="shortcut icon" href="/favicons/favicon.ico">
|
||||
<meta name="msapplication-TileColor" content="#603cba">
|
||||
<meta name="msapplication-TileImage" content="/favicons/mstile-144x144.png">
|
||||
<meta name="msapplication-config" content="/favicons/browserconfig.xml">
|
||||
<meta name="theme-color" content="#303284">
|
||||
|
||||
<title>Apache License, Version 2.0</title>
|
||||
<link href='https://fonts.googleapis.com/css?family=Source+Sans+Pro:400,700%7cDroid+Serif:400,700' rel='stylesheet' type='text/css'>
|
||||
<link href="/css/min.bootstrap.css" rel="stylesheet">
|
||||
<link href="/css/styles.css" rel="stylesheet">
|
||||
|
||||
|
||||
<!-- Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at . http://www.apache.org/licenses/LICENSE-2.0 . Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. -->
|
||||
|
||||
</head>
|
||||
|
||||
<body>
|
||||
<!-- Navigation -->
|
||||
<header>
|
||||
<nav class="navbar navbar-default navbar-fixed-top">
|
||||
<div class="container">
|
||||
<div class="navbar-header">
|
||||
<button class="navbar-toggle" type="button" data-toggle="collapse" data-target="#mainnav-collapse">
|
||||
<span class="sr-only">Toggle navigation</span>
|
||||
<span class="icon-bar"></span>
|
||||
<span class="icon-bar"></span>
|
||||
<span class="icon-bar"></span>
|
||||
</button>
|
||||
<a href="#" class="navbar-brand"><span class="glyphicon glyphicon-home"></span></a>
|
||||
</div>
|
||||
<div class="collapse navbar-collapse" id="mainnav-collapse">
|
||||
<div style="line-height:20px; padding-top:5px; float:left"><a href="/">Home</a> » <a href="/licenses/">Licenses</a></div>
|
||||
<ul class="nav navbar-nav navbar-right">
|
||||
<li class="dropdown">
|
||||
<a href="#" class="dropdown-toggle" data-toggle="dropdown">About <span class="caret"></span></a>
|
||||
<ul class="dropdown-menu" role="menu">
|
||||
<li><a href="/foundation">Overview</a></li>
|
||||
<li><a href="/foundation/members.html">Members</a></li>
|
||||
<li><a href="/foundation/how-it-works.html">Process</a></li>
|
||||
<li><a href="/foundation/sponsorship.html">Sponsorship</a></li>
|
||||
<li><a href="/foundation/glossary.html">Glossary</a></li>
|
||||
<li><a href="/foundation/preFAQ.html">FAQ</a></li>
|
||||
<li><a href="/foundation/policies/conduct.html">Code of Conduct</a></li>
|
||||
<li><a href="/foundation/contact.html ">Contact</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li><a href="/index.html#projects-list">Projects</a></li>
|
||||
<li class="dropdown">
|
||||
<a href="#" class="dropdown-toggle" data-toggle="dropdown">People <span class="caret"></span></a>
|
||||
<ul class="dropdown-menu" role="menu">
|
||||
<li><a href="http://people.apache.org/">Overview</a></li>
|
||||
<li><a href="http://people.apache.org/committer-index.html">Committers</a></li>
|
||||
<li><a href="/foundation/how-it-works.html#meritocracy">Meritocracy</a></li>
|
||||
<li><a href="/foundation/how-it-works.html#roles">Roles</a></li>
|
||||
<li><a href="/foundation/policies/conduct.html">Code of Conduct</a></li>
|
||||
<li><a href="http://planet.apache.org/">Planet Apache</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li class="dropdown">
|
||||
<a href="#" class="dropdown-toggle" data-toggle="dropdown">Get Involved <span class="caret"></span></a>
|
||||
<ul class="dropdown-menu" role="menu">
|
||||
<li><a href="/foundation/getinvolved.html">Overview</a></li>
|
||||
<li><a href="http://community.apache.org/">Community Development</a></li>
|
||||
<li><a href="/foundation/policies/conduct.html">Code of Conduct</a></li>
|
||||
<li><a href="http://helpwanted.apache.org/">Help Wanted</a></li>
|
||||
<li><a href="http://www.apachecon.com/">ApacheCon</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
<li><a href="/dyn/closer.cgi">Download</a></li>
|
||||
<li class="dropdown">
|
||||
<a href="#" class="dropdown-toggle" data-toggle="dropdown">Support Apache <span class="caret"></span></a>
|
||||
<ul class="dropdown-menu" role="menu">
|
||||
<li><a href="/foundation/sponsorship.html">Sponsorship</a></li>
|
||||
<li><a href="/foundation/contributing.html">Donations</a></li>
|
||||
<li><a href="/foundation/buy_stuff.html">Buy Stuff</a></li>
|
||||
<li><a href="/foundation/thanks.html">Thanks</a></li>
|
||||
</ul>
|
||||
</li>
|
||||
</ul>
|
||||
</div>
|
||||
</div>
|
||||
</nav>
|
||||
</header>
|
||||
<!-- / Navigation -->
|
||||
<div class="container">
|
||||
<div class="row">
|
||||
<div class="col-md-6 col-sm-5 col-xs-12">
|
||||
<img src="/img/asf_logo.png" alt="Apache Logo" style="max-width: 100%;">
|
||||
</div>
|
||||
<div class="col-md-3 col-sm-3 col-xs-6">
|
||||
<a href="http://apache.org/foundation/contributing.html" title="Support Apache">
|
||||
<img src="/images/SupportApache-small.png" style="height: 150px; width: 150px; margin-top: 5px; margin-bottom: 5px;">
|
||||
</a>
|
||||
</div>
|
||||
<div class="col-md-3 col-sm-4 col-xs-6">
|
||||
<div class="input-group" style="margin-bottom: 5px;">
|
||||
<script>
|
||||
(function() {
|
||||
var cx = '005703438322411770421:5mgshgrgx2u';
|
||||
var gcse = document.createElement('script');
|
||||
gcse.type = 'text/javascript';
|
||||
gcse.async = true;
|
||||
gcse.src = (document.location.protocol == 'https:' ? 'https:' : 'http:') +
|
||||
'//cse.google.com/cse.js?cx=' + cx;
|
||||
var s = document.getElementsByTagName('script')[0];
|
||||
s.parentNode.insertBefore(gcse, s);
|
||||
})();
|
||||
</script>
|
||||
<gcse:searchbox-only></gcse:searchbox-only>
|
||||
</div>
|
||||
<a role="button" class="btn btn-block btn-default btn-xs" href="/foundation/how-it-works.html">The Apache Way</a>
|
||||
<a role="button" class="btn btn-block btn-default btn-xs" href="https://community.apache.org/contributors/">Contribute</a>
|
||||
<a role="button" class="btn btn-block btn-default btn-xs" href="/foundation/thanks.html">ASF Sponsors</a>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
<div class="container"><style type="text/css">
|
||||
/* The following code is added by mdx_elementid.py
|
||||
It was originally lifted from http://subversion.apache.org/style/site.css */
|
||||
/*
|
||||
* Hide class="elementid-permalink", except when an enclosing heading
|
||||
* has the :hover property.
|
||||
*/
|
||||
.headerlink, .elementid-permalink {
|
||||
visibility: hidden;
|
||||
}
|
||||
h2:hover > .headerlink, h3:hover > .headerlink, h1:hover > .headerlink, h6:hover > .headerlink, h4:hover > .headerlink, h5:hover > .headerlink, dt:hover > .elementid-permalink { visibility: visible }</style>
|
||||
<p>Apache License<br></br>Version 2.0, January 2004<br></br>
|
||||
<a href="http://www.apache.org/licenses/">http://www.apache.org/licenses/</a> </p>
|
||||
<p>TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION</p>
|
||||
<p><strong><a name="definitions">1. Definitions</a></strong>.</p>
|
||||
<p>"License" shall mean the terms and conditions for use, reproduction, and
|
||||
distribution as defined by Sections 1 through 9 of this document.</p>
|
||||
<p>"Licensor" shall mean the copyright owner or entity authorized by the
|
||||
copyright owner that is granting the License.</p>
|
||||
<p>"Legal Entity" shall mean the union of the acting entity and all other
|
||||
entities that control, are controlled by, or are under common control with
|
||||
that entity. For the purposes of this definition, "control" means (i) the
|
||||
power, direct or indirect, to cause the direction or management of such
|
||||
entity, whether by contract or otherwise, or (ii) ownership of fifty
|
||||
percent (50%) or more of the outstanding shares, or (iii) beneficial
|
||||
ownership of such entity.</p>
|
||||
<p>"You" (or "Your") shall mean an individual or Legal Entity exercising
|
||||
permissions granted by this License.</p>
|
||||
<p>"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation source,
|
||||
and configuration files.</p>
|
||||
<p>"Object" form shall mean any form resulting from mechanical transformation
|
||||
or translation of a Source form, including but not limited to compiled
|
||||
object code, generated documentation, and conversions to other media types.</p>
|
||||
<p>"Work" shall mean the work of authorship, whether in Source or Object form,
|
||||
made available under the License, as indicated by a copyright notice that
|
||||
is included in or attached to the work (an example is provided in the
|
||||
Appendix below).</p>
|
||||
<p>"Derivative Works" shall mean any work, whether in Source or Object form,
|
||||
that is based on (or derived from) the Work and for which the editorial
|
||||
revisions, annotations, elaborations, or other modifications represent, as
|
||||
a whole, an original work of authorship. For the purposes of this License,
|
||||
Derivative Works shall not include works that remain separable from, or
|
||||
merely link (or bind by name) to the interfaces of, the Work and Derivative
|
||||
Works thereof.</p>
|
||||
<p>"Contribution" shall mean any work of authorship, including the original
|
||||
version of the Work and any modifications or additions to that Work or
|
||||
Derivative Works thereof, that is intentionally submitted to Licensor for
|
||||
inclusion in the Work by the copyright owner or by an individual or Legal
|
||||
Entity authorized to submit on behalf of the copyright owner. For the
|
||||
purposes of this definition, "submitted" means any form of electronic,
|
||||
verbal, or written communication sent to the Licensor or its
|
||||
representatives, including but not limited to communication on electronic
|
||||
mailing lists, source code control systems, and issue tracking systems that
|
||||
are managed by, or on behalf of, the Licensor for the purpose of discussing
|
||||
and improving the Work, but excluding communication that is conspicuously
|
||||
marked or otherwise designated in writing by the copyright owner as "Not a
|
||||
Contribution."</p>
|
||||
<p>"Contributor" shall mean Licensor and any individual or Legal Entity on
|
||||
behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.</p>
|
||||
<p><strong><a name="copyright">2. Grant of Copyright License</a></strong>. Subject to the
|
||||
terms and conditions of this License, each Contributor hereby grants to You
|
||||
a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of, publicly
|
||||
display, publicly perform, sublicense, and distribute the Work and such
|
||||
Derivative Works in Source or Object form.</p>
|
||||
<p><strong><a name="patent">3. Grant of Patent License</a></strong>. Subject to the terms
|
||||
and conditions of this License, each Contributor hereby grants to You a
|
||||
perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made, use,
|
||||
offer to sell, sell, import, and otherwise transfer the Work, where such
|
||||
license applies only to those patent claims licensable by such Contributor
|
||||
that are necessarily infringed by their Contribution(s) alone or by
|
||||
combination of their Contribution(s) with the Work to which such
|
||||
Contribution(s) was submitted. If You institute patent litigation against
|
||||
any entity (including a cross-claim or counterclaim in a lawsuit) alleging
|
||||
that the Work or a Contribution incorporated within the Work constitutes
|
||||
direct or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate as of the
|
||||
date such litigation is filed.</p>
|
||||
<p><strong><a name="redistribution">4. Redistribution</a></strong>. You may reproduce and
|
||||
distribute copies of the Work or Derivative Works thereof in any medium,
|
||||
with or without modifications, and in Source or Object form, provided that
|
||||
You meet the following conditions:</p>
|
||||
<ol style="list-style: lower-latin;">
|
||||
<li>You must give any other recipients of the Work or Derivative Works a
|
||||
copy of this License; and</li>
|
||||
|
||||
<li>You must cause any modified files to carry prominent notices stating
|
||||
that You changed the files; and</li>
|
||||
|
||||
<li>You must retain, in the Source form of any Derivative Works that You
|
||||
distribute, all copyright, patent, trademark, and attribution notices from
|
||||
the Source form of the Work, excluding those notices that do not pertain to
|
||||
any part of the Derivative Works; and</li>
|
||||
|
||||
<li>If the Work includes a "NOTICE" text file as part of its distribution,
|
||||
then any Derivative Works that You distribute must include a readable copy
|
||||
of the attribution notices contained within such NOTICE file, excluding
|
||||
those notices that do not pertain to any part of the Derivative Works, in
|
||||
at least one of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or documentation,
|
||||
if provided along with the Derivative Works; or, within a display generated
|
||||
by the Derivative Works, if and wherever such third-party notices normally
|
||||
appear. The contents of the NOTICE file are for informational purposes only
|
||||
and do not modify the License. You may add Your own attribution notices
|
||||
within Derivative Works that You distribute, alongside or as an addendum to
|
||||
the NOTICE text from the Work, provided that such additional attribution
|
||||
notices cannot be construed as modifying the License.
|
||||
<br/>
|
||||
<br/>
|
||||
You may add Your own copyright statement to Your modifications and may
|
||||
provide additional or different license terms and conditions for use,
|
||||
reproduction, or distribution of Your modifications, or for any such
|
||||
Derivative Works as a whole, provided Your use, reproduction, and
|
||||
distribution of the Work otherwise complies with the conditions stated in
|
||||
this License.
|
||||
</li>
|
||||
|
||||
</ol>
|
||||
|
||||
<p><strong><a name="contributions">5. Submission of Contributions</a></strong>. Unless You
|
||||
explicitly state otherwise, any Contribution intentionally submitted for
|
||||
inclusion in the Work by You to the Licensor shall be under the terms and
|
||||
conditions of this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify the
|
||||
terms of any separate license agreement you may have executed with Licensor
|
||||
regarding such Contributions.</p>
|
||||
<p><strong><a name="trademarks">6. Trademarks</a></strong>. This License does not grant
|
||||
permission to use the trade names, trademarks, service marks, or product
|
||||
names of the Licensor, except as required for reasonable and customary use
|
||||
in describing the origin of the Work and reproducing the content of the
|
||||
NOTICE file.</p>
|
||||
<p><strong><a name="no-warranty">7. Disclaimer of Warranty</a></strong>. Unless required by
|
||||
applicable law or agreed to in writing, Licensor provides the Work (and
|
||||
each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT
|
||||
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including,
|
||||
without limitation, any warranties or conditions of TITLE,
|
||||
NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You
|
||||
are solely responsible for determining the appropriateness of using or
|
||||
redistributing the Work and assume any risks associated with Your exercise
|
||||
of permissions under this License.</p>
|
||||
<p><strong><a name="no-liability">8. Limitation of Liability</a></strong>. In no event and
|
||||
under no legal theory, whether in tort (including negligence), contract, or
|
||||
otherwise, unless required by applicable law (such as deliberate and
|
||||
grossly negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a result
|
||||
of this License or out of the use or inability to use the Work (including
|
||||
but not limited to damages for loss of goodwill, work stoppage, computer
|
||||
failure or malfunction, or any and all other commercial damages or losses),
|
||||
even if such Contributor has been advised of the possibility of such
|
||||
damages.</p>
|
||||
<p><strong><a name="additional">9. Accepting Warranty or Additional Liability</a></strong>.
|
||||
While redistributing the Work or Derivative Works thereof, You may choose
|
||||
to offer, and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this License.
|
||||
However, in accepting such obligations, You may act only on Your own behalf
|
||||
and on Your sole responsibility, not on behalf of any other Contributor,
|
||||
and only if You agree to indemnify, defend, and hold each Contributor
|
||||
harmless for any liability incurred by, or claims asserted against, such
|
||||
Contributor by reason of your accepting any such warranty or additional
|
||||
liability.</p>
|
||||
<p>END OF TERMS AND CONDITIONS</p>
|
||||
<h1 id="apply">APPENDIX: How to apply the Apache License to your work<a class="headerlink" href="#apply" title="Permanent link">¶</a></h1>
|
||||
<p>To apply the Apache License to your work, attach the following boilerplate
|
||||
notice, with the fields enclosed by brackets "[]" replaced with your own
|
||||
identifying information. (Don't include the brackets!) The text should be
|
||||
enclosed in the appropriate comment syntax for the file format. We also
|
||||
recommend that a file or class name and description of purpose be included
|
||||
on the same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.</p>
|
||||
<div class="codehilite"><pre>Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
</pre></div></div>
|
||||
|
||||
<!-- Footer -->
|
||||
|
||||
<footer class="bg-primary">
|
||||
<div class="container">
|
||||
<div class="row">
|
||||
<br />
|
||||
<div class="col-sm-1">
|
||||
|
||||
</div>
|
||||
<div class="col-sm-2">
|
||||
<h5 class="white">Community</h5>
|
||||
<ul class="list-unstyled white" role="menu">
|
||||
<li><a href="http://community.apache.org/">Overview</a></li>
|
||||
<li><a href="/foundation/conferences.html">Conferences</a></li>
|
||||
<li><a href="http://community.apache.org/gsoc.html">Summer of Code</a></li>
|
||||
<li><a href="http://community.apache.org/newcomers/">Getting Started</a></li>
|
||||
<li><a href="/foundation/how-it-works.html">The Apache Way</a></li>
|
||||
<li><a href="/travel/">Travel Assistance</a></li>
|
||||
<li><a href="/foundation/getinvolved.html">Get Involved</a></li>
|
||||
<li><a href="/foundation/policies/conduct.html">Code of Conduct</a></li>
|
||||
<li><a href="http://community.apache.org/newbiefaq.html">Community FAQ</a></li>
|
||||
<li><a href="/memorials/">Memorials</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="col-sm-2">
|
||||
<h5 class="white">Innovation</h5>
|
||||
<ul class="list-unstyled white" role="menu">
|
||||
<li><a href="http://incubator.apache.org/">Incubator</a></li>
|
||||
<li><a href="http://labs.apache.org/">Labs</a></li>
|
||||
<li><a href="/licenses/">Licensing</a></li>
|
||||
<li><a href="/foundation/license-faq.html">Licensing FAQ</a></li>
|
||||
<li><a href="/foundation/marks/">Trademark Policy</a></li>
|
||||
<li><a href="/foundation/contact.html">Contacts</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="col-sm-2">
|
||||
<h5 class="white">Tech Operations</h5>
|
||||
<ul class="list-unstyled white" role="menu">
|
||||
<li><a href="/dev/">Developer Information</a></li>
|
||||
<li><a href="/dev/infrastructure.html">Infrastructure</a></li>
|
||||
<li><a href="/security/">Security</a></li>
|
||||
<li><a href="http://status.apache.org">Status</a></li>
|
||||
<li><a href="/foundation/contact.html">Contacts</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="col-sm-2">
|
||||
<h5 class="white">Press</h5>
|
||||
<ul class="list-unstyled white" role="menu">
|
||||
<li><a href="/press/">Overview</a></li>
|
||||
<li><a href="https://blogs.apache.org/">ASF News</a></li>
|
||||
<li><a href="https://blogs.apache.org/foundation/">Announcements</a></li>
|
||||
<li><a href="https://twitter.com/TheASF">Twitter Feed</a></li>
|
||||
<li><a href="/press/#contact">Contacts</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="col-sm-2">
|
||||
<h5 class="white">Legal</h5>
|
||||
<ul class="list-unstyled white" role="menu">
|
||||
<li><a href="/legal/">Legal Affairs</a></li>
|
||||
<li><a href="/legal/dmca.html">DMCA</a></li>
|
||||
<li><a href="/licenses/">Licenses</a></li>
|
||||
<li><a href="/foundation/marks/">Trademark Policy</a></li>
|
||||
<li><a href="/foundation/records/">Public Records</a></li>
|
||||
<li><a href="/foundation/policies/privacy.html">Privacy Policy</a></li>
|
||||
<li><a href="/licenses/exports/">Export Information</a></li>
|
||||
<li><a href="/foundation/license-faq.html">License/Distribution FAQ</a></li>
|
||||
<li><a href="/foundation/contact.html">Contacts</a></li>
|
||||
</ul>
|
||||
</div>
|
||||
|
||||
<div class="col-sm-1">
|
||||
</div>
|
||||
|
||||
</div>
|
||||
<hr class="col-lg-12 hr-white" />
|
||||
<div class="row">
|
||||
<div class="col-lg-12">
|
||||
<p class="text-center">Copyright © 2018 The Apache Software Foundation, Licensed under the <a class="white" href="http://www.apache.org/licenses/LICENSE-2.0">Apache License, Version 2.0</a>.</p>
|
||||
<p class="text-center">Apache and the Apache feather logo are trademarks of The Apache Software Foundation.</p>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
</footer>
|
||||
|
||||
<!-- / Footer -->
|
||||
|
||||
<script src="/js/jquery-2.1.1.min.js"></script>
|
||||
<script src="/js/bootstrap.js"></script>
|
||||
</body>
|
||||
</html>
|
||||
|
|
@ -0,0 +1,202 @@
|
|||
|
||||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [yyyy] [name of copyright owner]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
884
README.md
884
README.md
|
|
@ -1,3 +1,883 @@
|
|||
# ISCE_INSAR
|
||||
# ISCE2
|
||||
|
||||
ISCE_INSAR 的 LAMP 的开发分支版本
|
||||
[](https://circleci.com/gh/isce-framework/isce2)
|
||||
|
||||
This is the Interferometric synthetic aperture radar Scientific Computing
|
||||
Environment (ISCE). Its initial development was funded by NASA's Earth Science
|
||||
Technology Office (ESTO) under the Advanced Information Systems Technology
|
||||
(AIST) 2008 and is currently being funded under the NASA-ISRO SAR (NISAR)
|
||||
project.
|
||||
|
||||
THIS IS RESEARCH CODE PROVIDED TO YOU "AS IS" WITH NO WARRANTIES OF CORRECTNESS.
|
||||
USE AT YOUR OWN RISK.
|
||||
|
||||
This software is open source under the terms of the the Apache License. Its export
|
||||
classification is 'EAR99 NLR', which entails some restrictions and responsibilities.
|
||||
Please read the accompanying LICENSE.txt and LICENSE-2.0 files.
|
||||
|
||||
ISCE is a framework designed for the purpose of processing Interferometric
|
||||
Synthetic Aperture Radar (InSAR) data. The framework aspects of it have been
|
||||
designed as a general software development framework. It may have additional
|
||||
utility in a general sense for building other types of software packages. In
|
||||
its InSAR aspect ISCE supports data from many space-borne satellites and one
|
||||
air-borne platform. We continue to increase the number of sensors supported.
|
||||
At this time the sensors that are supported are the following: ALOS, ALOS2,
|
||||
COSMO_SKYMED, ENVISAT, ERS, KOMPSAT5, RADARSAT1, RADARSAT2, RISAT1, Sentinel1,
|
||||
TERRASARX, UAVSAR and SAOCOM1A.
|
||||
|
||||
## Contents
|
||||
|
||||
1. [Software Dependencies](#software-dependencies)
|
||||
- [Installing dependencies with Anaconda](#with-anaconda)
|
||||
- [Installing dependencies with Macports](#with-macports)
|
||||
- [Note On 'python3' Exectuable Convention](#python3-convention)
|
||||
- [License required for dependencies to enable some workflows in ISCE](#license-required-for-dependencies-to-enable-some-workflows-in-isce)
|
||||
2. [Building ISCE](#building-isce)
|
||||
- [SCons](#scons-recommended)
|
||||
- [Configuration control: SCONS\_CONFIG\_DIR and SConfigISCE](#configuration-control)
|
||||
- [Install ISCE](#install-isce)
|
||||
- [CMake](#cmake-experimental)
|
||||
- [Setup Your Environment](#setup-your-environment)
|
||||
3. [Running ISCE](#running-isce)
|
||||
- [Running ISCE from the command line](#running-isce-from-the-command-line)
|
||||
- [Running ISCE in the Python interpreter](#running-isce-in-the-python-interpreter)
|
||||
- [Running ISCE with steps](#running-isce-with-steps)
|
||||
- [Running ISCE stack processors](./contrib/stack/README.md)
|
||||
- [Notes on Digital Elevation Models (DEMs)](#notes-on-digital-elevation-models)
|
||||
4. [Input Files](#input-files)
|
||||
5. [Component Configurability](#component-configurability)
|
||||
- [Component Names: Family and Instance](#component-names-family-and-instance)
|
||||
- [Component Configuration Files: Locations, Names, Priorities](#component-configuration-files-locations-names-priorities)
|
||||
- [Component Configuration Help](#component-configuration-help)
|
||||
6. [User community Forums](#user-community-forums)
|
||||
|
||||
------
|
||||
|
||||
## 1. Software Dependencies
|
||||
|
||||
### Basic:
|
||||
|
||||
* gcc >= 4.8+ (with C++11 support)
|
||||
* fftw >= 3.2.2 (with single precision support)
|
||||
* Python >= 3.5 (3.6 preferred)
|
||||
* scons >= 2.0.1
|
||||
* curl - for automatic DEM downloads
|
||||
* GDAL and its Python bindings >= 2.2
|
||||
|
||||
### Optional:
|
||||
#### For a few sensor types:
|
||||
|
||||
* hdf5 >= 1.8.5 and h5py >= 1.3.1 - for COSMO-SkyMed, Kompsat5, and 'Generic' sensor
|
||||
|
||||
#### For mdx (image visualization tool) options:
|
||||
|
||||
* Motif libraries and include files
|
||||
* ImageMagick - for mdx production of kml file (advanced feature)
|
||||
* grace - for mdx production of color table and line plots (advanced feature)
|
||||
|
||||
#### For the "unwrap 2 stage" option:
|
||||
|
||||
RelaxIV and Pulp are required. Information on getting these packages if
|
||||
you want to try the unwrap 2 stage option:
|
||||
|
||||
* RelaxIV (a minimum cost flow relaxation algorithm coded in C++ by
|
||||
Antonio Frangioni and Claudio Gentile at the University of Pisa,
|
||||
based on the Fortran code developed by Dimitri Bertsekas while
|
||||
at MIT) is available at https://github.com/frangio68/Min-Cost-Flow-Class.
|
||||
The RelaxIV files should be placed in the directory: 'contrib/UnwrapComp/src/RelaxIV' so that ISCE will compile it properly.
|
||||
|
||||
* PULP: Use easy\_install or pip to install it or else clone it from,
|
||||
https://github.com/coin-or/pulp. Make sure the path to the installed
|
||||
pulp.py is on your PYTHONPATH environment variable (it should be the case
|
||||
if you use easy\_install or pip).
|
||||
|
||||
#### For splitSpectrum and GPU modules:
|
||||
|
||||
* cython3 - must have an executable named cython3 (use a symbolic link)
|
||||
* cuda - for GPUtopozero and GPUgeo2rdr
|
||||
* opencv - for split spectrum
|
||||
|
||||
### With Anaconda
|
||||
|
||||
The conda requirements file is shown below:
|
||||
```bash
|
||||
cython
|
||||
gdal
|
||||
git
|
||||
h5py
|
||||
libgdal
|
||||
pytest
|
||||
numpy
|
||||
fftw
|
||||
scipy
|
||||
basemap
|
||||
scons
|
||||
opencv
|
||||
```
|
||||
|
||||
With the above contents in a textfile named "requirements.txt"
|
||||
|
||||
```bash
|
||||
> conda install --yes --file requirements.txt
|
||||
```
|
||||
|
||||
Ensure that you create a link in the anaconda bin directory for cython3.
|
||||
|
||||
|
||||
### With Macports
|
||||
|
||||
The following ports (assuming gcc7 and python36) are needed on OSX
|
||||
|
||||
```bash
|
||||
gcc7
|
||||
openmotif
|
||||
python36
|
||||
fftw-3 +gcc7
|
||||
fftw-3-single +gcc7
|
||||
xorg-libXt +flat_namespace
|
||||
git
|
||||
hdf5 +gcc7
|
||||
h5utils
|
||||
netcdf +gcc7
|
||||
netcdf-cxx
|
||||
netcdf-fortran
|
||||
postgresql95
|
||||
postgresql95-server
|
||||
proj
|
||||
cairo
|
||||
scons
|
||||
opencv +python36
|
||||
ImageMagick
|
||||
gdal +expat +geos +hdf5 +netcdf +postgresql95 +sqlite3
|
||||
py36-numpy +gcc7 +openblas
|
||||
py36-scipy +gcc7 +openblas
|
||||
py36-matplotlib +cairo +tkinter
|
||||
py36-matplotlib-basemap
|
||||
py36-h5py
|
||||
py36-gdal
|
||||
```
|
||||
|
||||
### Python3 Convention
|
||||
|
||||
We follow the convention of most package managers in using the executable
|
||||
'python3' for Python3.x and 'python' for Python2.x. This makes it easy to turn
|
||||
Python code into executable commands that know which version of Python they
|
||||
should invoke by naming the appropriate version at the top of the executable
|
||||
file (as in #!/usr/bin/env python3 or #!/usr/bin/env python). Unfortunately,
|
||||
not all package managers (such as macports) follow this convention. Therefore,
|
||||
if you use one of a package manager that does not create the 'python3'
|
||||
executable automatically, then you should place a soft link on your path to
|
||||
have the command 'python3' on your path. Then you will be able to execute an
|
||||
ISCE application such as 'stripmapApp.py as "> stripmapApp.py" rather than as
|
||||
"> /path-to-Python3/python stripmapApp.py".
|
||||
|
||||
### License required for dependencies to enable some workflows in ISCE
|
||||
|
||||
Some of the applications, or workflows (such as insarApp.py and isceApp.py),
|
||||
in ISCE that may be familiar to users will not work with this open source version
|
||||
of ISCE without obtaining licensed components. WinSAR users who have downloaded
|
||||
ISCE from the UNAVCO website (https://winsar.unavco.org/software/isce) have signed
|
||||
the licence agreement and will be given access to those licensed components. Others
|
||||
wanting to use those specific workflows and components may be able to sign the
|
||||
agreement through UNAVCO if they become members there. Further instructions will
|
||||
be available for a possible other procedure for obtaining a license directly from
|
||||
the supplier of those components.
|
||||
|
||||
ISCE provides workflows that do not require the licensed components that
|
||||
may be used effectively and that will be supported going forward by the ISCE team.
|
||||
Users that need to work with newly processed data along with older processed data
|
||||
may require those licensed components as a convenience unless they also reprocess
|
||||
the older data with the same workflows available in this open source release.
|
||||
|
||||
|
||||
-------
|
||||
|
||||
## Building ISCE
|
||||
|
||||
### SCons (recommended)
|
||||
|
||||
#### Configuration control
|
||||
|
||||
Scons requires that configuration information be present in a directory
|
||||
specified by the environment variable SCONS\_CONFIG\_DIR. First, create a
|
||||
build configuration file, called SConfigISCE and place it in your chosen
|
||||
SCONS\_CONFIG\_DIR. The SConfigISCE file should contain the following
|
||||
information, note that the #-symbol denotes a comment and does not need
|
||||
to be present in the SConfigISCE file:
|
||||
|
||||
NOTE: Locations vary from system to system, so make sure to use the appropriate location.
|
||||
The one listed here are just for illustrative purpose.
|
||||
|
||||
```bash
|
||||
# The directory in which ISCE will be built
|
||||
PRJ_SCONS_BUILD = $ISCE_BUILD_ROOT/isce
|
||||
|
||||
# The directory into which ISCE will be installed
|
||||
PRJ_SCONS_INSTALL = $ISCE_INSTALL_ROOT/isce
|
||||
|
||||
# The location of libraries, such as libstdc++, libfftw3 (for most system
|
||||
# it's /usr/lib and/or /usr/local/lib/ and/or /opt/local/lib)
|
||||
LIBPATH = $YOUR_LIB_LOCATION_HOME/lib64 $YOUR_LIB_LOCATION_HOME/lib
|
||||
|
||||
# The location of Python.h. If you have multiple installations of python
|
||||
# make sure that it points to the right one
|
||||
CPPPATH = $YOUR_PYTHON_INSTALLATION_LOCATION/include/python3.xm $YOUR_PYTHON_INSTALLATION_LOCATION/lib/python3.x/site-packages/numpy/core/include
|
||||
|
||||
# The location of the fftw3.h (most likely something like /usr/include or
|
||||
# /usr/local/include /opt/local/include
|
||||
FORTRANPATH = $YOUR_FFTW3_INSTALLATION_LOCATION/include
|
||||
|
||||
# The location of your Fortran compiler. If not specified it will use the system one
|
||||
FORTRAN = $YOUR_COMPILER_LOCATION/bin/gfortran
|
||||
|
||||
# The location of your C compiler. If not specified it will use the system one
|
||||
CC = $YOUR_COMPILER_LOCATION/bin/gcc
|
||||
|
||||
# The location of your C++ compiler. If not specified it will use the system one
|
||||
CXX = $YOUR_COMPILER_LOCATION/bin/g++
|
||||
|
||||
#libraries needed for mdx display utility
|
||||
MOTIFLIBPATH = /opt/local/lib # path to libXm.dylib
|
||||
X11LIBPATH = /opt/local/lib # path to libXt.dylib
|
||||
MOTIFINCPATH = /opt/local/include # path to location of the Xm
|
||||
# directory with various include files (.h)
|
||||
X11INCPATH = /opt/local/include # path to location of the X11 directory
|
||||
# with various include files
|
||||
|
||||
#Explicitly enable cuda if needed
|
||||
ENABLE_CUDA = True
|
||||
CUDA_TOOLKIT_PATH = $YOUR_CUDA_INSTALLATION #/usr/local/cuda
|
||||
```
|
||||
|
||||
In the above listing of the SConfigISCE file, ISCE\_BUILD\_ROOT and
|
||||
ISCE\_INSTALL\_ROOT may be actual environment variables that you create or else
|
||||
you can replace them with the actual paths you choose to use for the build files
|
||||
and the install files. Also, in the following the capitalization of 'isce' as
|
||||
lower case does matter. This is the case-sensitive package name that Python
|
||||
code uses for importing isce.
|
||||
|
||||
#### Install ISCE
|
||||
|
||||
```bash
|
||||
cd isce
|
||||
scons install
|
||||
```
|
||||
|
||||
For a verbose install run:
|
||||
|
||||
```bash
|
||||
scons -Q install
|
||||
```
|
||||
|
||||
The scons command also allows you to explicitly specify the name of the
|
||||
SConfigISCE file, which could be used to specify an alternative file for
|
||||
(say SConfigISCE\_NEW) which must still be located in the same
|
||||
SCONS\_CONFIG\_DIR, run
|
||||
|
||||
```bash
|
||||
scons install --setupfile=SConfigISCE_NEW
|
||||
```
|
||||
|
||||
This will build the necessary components and install them into the location
|
||||
specified in the configuration file as PRJ\_SCONS\_INSTALL.
|
||||
|
||||
|
||||
##### Note about compiling ISCE after an unsuccessful build.
|
||||
|
||||
When building ISCE, scons will check the list of header files and libraries that
|
||||
ISCE requires. Scons will cache the results of this dependency checking. So,
|
||||
if you try to build ISCE and scons tells you that you are missing headers or
|
||||
libraries, then you should remove the cached files before trying to build ISCE
|
||||
again after installing the missing headers and libraries. The cached files are
|
||||
config.log, .sconfig.dblite, and the files in directory .sconf_temp. You should
|
||||
run the following command while in the top directory of the ISCE source (the
|
||||
directory containing the SConstruct file):
|
||||
|
||||
```bash
|
||||
> rm -rf config.log .sconfig.dblite .sconf_temp .sconsign.dblite
|
||||
```
|
||||
|
||||
and then try "scons install" again.
|
||||
|
||||
The same also applies for rebuilding with SCons after updating the code, e.g.
|
||||
via a `git pull`. If you encounter issues after such a change, it's recommended
|
||||
to remove the cache files and build directory and do a fresh rebuild.
|
||||
|
||||
### CMake (experimental)
|
||||
Make sure you have the following prerequisites:
|
||||
* CMake ≥ 3.13
|
||||
* GCC ≥ 4.8 (with C++11 support)
|
||||
* Python ≥ 3.5
|
||||
* Cython
|
||||
* FFTW 3
|
||||
* GDAL
|
||||
|
||||
```sh
|
||||
git clone https://github.com/isce-framework/isce2
|
||||
cd isce2
|
||||
mkdir build
|
||||
cd build
|
||||
cmake .. -DCMAKE_INSTALL_PREFIX=/my/isce/install/location
|
||||
make install
|
||||
```
|
||||
|
||||
#### Additional cmake configuration options
|
||||
|
||||
CMake uses `CMAKE_PREFIX_PATH` as a global prefix for finding packages,
|
||||
which can come in handy when using e.g. Anaconda:
|
||||
|
||||
```sh
|
||||
cmake [...] -DCMAKE_PREFIX_PATH=$CONDA_PREFIX
|
||||
```
|
||||
|
||||
On macOS, cmake will also look for systemwide "frameworks",
|
||||
which is usually not what you want when using Conda or Macports.
|
||||
|
||||
```sh
|
||||
cmake [...] -DCMAKE_FIND_FRAMEWORK=NEVER
|
||||
```
|
||||
|
||||
For packagers, the `PYTHON_MODULE_DIR` can be used to specify ISCE2's
|
||||
package installation location relative to the installation prefix
|
||||
|
||||
```sh
|
||||
cmake [...] -DPYTHON_MODULE_DIR=lib/python3.8m/site-packages
|
||||
```
|
||||
|
||||
### Setup Your Environment
|
||||
|
||||
Once everything is installed, you will need to set the following environment
|
||||
variables to run the programs included in ISCE ($ISCE_INSTALL_ROOT may be an
|
||||
environment variable you created [above](#configuration-control) or else replace it with the actual
|
||||
path to where you installed ISCE):
|
||||
|
||||
```bash
|
||||
export PYTHONPATH=$ISCE\_INSTALL\_ROOT:$PYTHONPATH
|
||||
```
|
||||
|
||||
and to put the executable commands in the ISCE applications directory on your
|
||||
PATH for convenience,
|
||||
|
||||
```bash
|
||||
export ISCE_HOME=$ISCE_INSTALL_ROOT/isce
|
||||
export PATH=$ISCE_HOME/applications:$PATH
|
||||
```
|
||||
|
||||
An optional environment variable is $ISCEDB. This variable points to a
|
||||
directory in which you may place xml files containing global preferences. More
|
||||
information on this directory and the files that you might place there is
|
||||
given below in Section on [Input Files](#input-files). For now you can ignore this environment variable.
|
||||
|
||||
To test your installation and your environment, do the following:
|
||||
|
||||
```bash
|
||||
> python3
|
||||
>>> import isce
|
||||
>>> isce.version.release_version
|
||||
```
|
||||
-----
|
||||
|
||||
## Running ISCE
|
||||
|
||||
### Running ISCE from the command line
|
||||
|
||||
Copy the example xml files located in the example directory in the ISCE source
|
||||
tree to a working directory and modify them to point to your own data. Run
|
||||
them using the command:
|
||||
|
||||
```bash
|
||||
> $ISCE_HOME/applications/stripmapApp.py isceInputFile.xml
|
||||
```
|
||||
|
||||
or (with $ISCE\_HOME/applications on your PATH) simply,
|
||||
|
||||
```bash
|
||||
> stripmapApp.py isceInputFile.xml
|
||||
```
|
||||
|
||||
The name of the input file on the command line is arbitrary. ISCE also looks
|
||||
for appropriately named input files in the local directory
|
||||
|
||||
You can also ask ISCE for help from the command line:
|
||||
|
||||
```bash
|
||||
> stripmapApp.py --help
|
||||
```
|
||||
|
||||
This will tell you the basic command and the options for the input file.
|
||||
Example input files are also given in the 'examples/input\_files' directory.
|
||||
|
||||
As explained in the [Component Configurability](#component-configurability) section below, it is also possible
|
||||
to run stripmapApp.py without giving an input file on the command line. ISCE will
|
||||
automatically find configuration files for applications and components if they
|
||||
are named appropriately.
|
||||
|
||||
### Running ISCE in the Python interpreter
|
||||
|
||||
It is also possible to run ISCE from within the Python interpreter. If you have
|
||||
an input file named insarInputs.xml you can do the following:
|
||||
|
||||
```bash
|
||||
%> python3
|
||||
>>> import isce
|
||||
>>> from stripmapApp import Insar
|
||||
>>> a = Insar(name="stripmapApp", cmdline="insarInputs.xml")
|
||||
>>> a.configure()
|
||||
>>> a.run()
|
||||
```
|
||||
|
||||
(As explained in the [Component Configurability](#component-configurability) section below, if the file
|
||||
insarInputs.xml were named stripmapApp.xml or insar.xml, then the 'cmdline' input
|
||||
on the line creating 'a' would not be necessary. The file 'stripmapApp.xml' would
|
||||
be loaded automatically because when 'a' is created above it is given the name
|
||||
'stripmapApp'. A file named 'insar.xml' would also be loaded automatically if it
|
||||
exists because the code defining stripmapApp.py gives all instances of it the
|
||||
'family' name 'insar'. See the Component Configurability section below for
|
||||
details.)
|
||||
|
||||
### Running ISCE with steps
|
||||
|
||||
Another way to run ISCE is the following:
|
||||
|
||||
```bash
|
||||
stripmapApp.py insar.xml --steps
|
||||
```
|
||||
|
||||
This will run stripmapApp.py from beginning to end as is done without the
|
||||
\-\-steps option, but with the added feature that the workflow state is
|
||||
stored in files after each step in the processing using Python's pickle
|
||||
module. This method of running stripmapApp.py is only a little slower
|
||||
and it uses extra disc space to store the pickle files, but it
|
||||
provides some advantage for debugging and for stopping and starting a
|
||||
workflow at any predetermined point in the flow.
|
||||
|
||||
The full options for running stripmapApp.py with steps is the following:
|
||||
|
||||
```bash
|
||||
stripmapApp.py insar.xml [--steps] [--start=<s>] [--end=<s>] [--dostep=<s>]
|
||||
```
|
||||
|
||||
where "\<s\>" is the name of a step. To see the full ordered list of steps
|
||||
the user can issue the following command:
|
||||
|
||||
```bash
|
||||
stripmapApp.py insar.xml --steps --help
|
||||
```
|
||||
|
||||
The \-\-steps option was explained above.
|
||||
The \-\-start and \-\-end option can be used together to process a range of steps.
|
||||
The \-\-dostep option is used to process a single step.
|
||||
|
||||
For the \-\-start and \-\-dostep options to work, of course, requires that the
|
||||
steps preceding the starting step have been run previously because the
|
||||
state of the work flow at the beginning of the first step to be run must
|
||||
be stored from a previous run.
|
||||
|
||||
An example for using steps might be to execute the end-to-end workflow
|
||||
with \-\-steps to store the state of the workflow after every step as in,
|
||||
|
||||
```bash
|
||||
stripmapApp.py insar.xml --steps
|
||||
```
|
||||
|
||||
Then use \-\-steps to rerun some of the steps (perhaps you made a code
|
||||
modification for one of the steps and want to test it without starting
|
||||
from the beginning) as in
|
||||
|
||||
```bash
|
||||
stripmapApp.py insar.xml --start=<step-name1> --end=<step-name2>
|
||||
```
|
||||
|
||||
or to rerun a single step as in
|
||||
|
||||
```bash
|
||||
stripmapApp.py insar.xml --dostep=<step-name>
|
||||
```
|
||||
|
||||
Running stripmapApp.py with \-\-steps also enables one to enter the Python
|
||||
interpreter after a run and load the state of the workflow at any stage
|
||||
and introspect the objects in the flow and play with them as follows,
|
||||
for example:
|
||||
|
||||
```bash
|
||||
%> python3
|
||||
>>> import isce
|
||||
>>> f = open("PICKLE/formslc", "rb")
|
||||
>>> import pickle
|
||||
>>> a = pickle.load(f)
|
||||
>>> o = a.getReferenceOrbit()
|
||||
>>> t, x, p, off = o._unpackOrbit()
|
||||
>>> print(t)
|
||||
>>> print(x)
|
||||
```
|
||||
|
||||
Someone with familiarity of the inner workings of ISCE can exploit
|
||||
this mode of interacting with the pickle object to discover much about
|
||||
the workflow states and also to edit the state to see its effect
|
||||
on a subsequent run with \-\-dostep or \-\-start.
|
||||
|
||||
### Running [ISCE stack processors](./contrib/stack/README.md)
|
||||
|
||||
### Notes on Digital Elevation Models
|
||||
|
||||
- ISCE will automatically download SRTM Digital Elevation Models when you run an
|
||||
application that requires a DEM. In order for this to work follow the next 3
|
||||
instructions:
|
||||
|
||||
1. You will need to have a user name and password from urs.earthdata.nasa.gov and
|
||||
you need to add LPDAAC applications to your account.
|
||||
|
||||
a. If you don't already have an earthdata username and password,
|
||||
you can set them at https://urs.earthdata.nasa.gov/
|
||||
|
||||
b. If you already have an earthdata account, please ensure that
|
||||
you add LPDAAC applications to your account:
|
||||
- Login to earthdata here: https://urs.earthdata.nasa.gov/home
|
||||
- Click on my applications on the profile
|
||||
- Click on “Add More Applications”
|
||||
- Search for “LP DAAC”
|
||||
- Select “LP DAAC Data Pool” and “LP DAAC OpenDAP” and approve.
|
||||
|
||||
2. create a file named .netrc with the following 3 lines:
|
||||
|
||||
```bash
|
||||
machine urs.earthdata.nasa.gov
|
||||
login your_earthdata_login_name
|
||||
password your_earthdata_password
|
||||
```
|
||||
|
||||
3. set permissions to prevent others from viewing your credentials:
|
||||
|
||||
```bash
|
||||
> chmod go-rwx .netrc
|
||||
```
|
||||
|
||||
- When you run applications that require a dem, such as stripmapApp.py, if a dem
|
||||
component is provided but the dem is referenced to the EGM96 geo reference (which
|
||||
is the case for SRTM DEMs) it will be converted to have the WGS84 ellipsoid as its
|
||||
reference. A new dem file with suffix wgs84 will be created.
|
||||
|
||||
- If no dem component is specified as an input, an EGM96-referenced DEM will be automatically
|
||||
downloaded (provided you followed the preceding instructions to register at
|
||||
earthdata) and then it will be converted into WGS84.
|
||||
|
||||
- If you define an environment variable named DEMDB to contain the path to a
|
||||
directory, then ISCE applications will download the DEM (and water body mask files)
|
||||
into the directory indicated by DEMDB. Also ISCE applications will look for the
|
||||
DEMs in the DEMDB directory and the local processing directory before downloading
|
||||
a new DEM. This will prevent ISCE from downloading multiple copies of a DEM if
|
||||
you work with data in different subdirectories that cover similar geographic
|
||||
locations.
|
||||
|
||||
|
||||
## Input Files
|
||||
|
||||
Input files are structured 'xml' documents. This section will briefly
|
||||
introduce their structure using a special case appropriate for processing ALOS
|
||||
data. Examples for the other sensor types can be found in the directory
|
||||
'examples/input\_files'.
|
||||
|
||||
The basic (ALOS) input file looks like this (indentation is optional):
|
||||
|
||||
### stripmapApp.xml (Option 1)
|
||||
|
||||
```xml
|
||||
<stripmapApp>
|
||||
<component name="stripmapApp">
|
||||
<property name="sensor name">ALOS</property>
|
||||
<component name="Reference">
|
||||
<property name="IMAGEFILE">
|
||||
/a/b/c/20070215/IMG-HH-ALPSRP056480670-H1.0__A
|
||||
</property>
|
||||
<property name="LEADERFILE">
|
||||
/a/b/c/20070215/LED-ALPSRP056480670-H1.0__A
|
||||
</property>
|
||||
<property name="OUTPUT">20070215</property>
|
||||
</component>
|
||||
<component name="Secondary">
|
||||
<property name="IMAGEFILE">
|
||||
/a/b/c/20061231/IMG-HH-ALPSRP049770670-H1.0__A
|
||||
</property>
|
||||
<property name="LEADERFILE">
|
||||
/a/b/c/20061231/LED-ALPSRP049770670-H1.0__A
|
||||
</property>
|
||||
<property name="OUTPUT">20061231</property>
|
||||
</component>
|
||||
</component>
|
||||
</stripmapApp>
|
||||
```
|
||||
|
||||
The data are enclosed between an opening tag and a closing tag. The \<stripmapApp\>
|
||||
tag is closed by the \<\/stripmapApp\> tag for example. This outer tag is necessary
|
||||
but its name has no significance. You can give it any name you like. The
|
||||
other tags, however, need to have the names shown above. There are 'property',
|
||||
and 'component' tags shown in this example.
|
||||
|
||||
The component tags have names that match a Component name in the ISCE code.
|
||||
The component tag named 'stripmapApp' refers to the configuration information for
|
||||
the Application (which is a Component) named "stripmapApp". Components contain
|
||||
properties and other components that are configurable. The property tags
|
||||
give the values of a single variable in the ISCE code. One of the properties
|
||||
defined in stripmapApp.py is the "sensor name" property. In the above example
|
||||
it is given the value ALOS. In order to run stripmapApp.py two images need to
|
||||
be specified. These are defined as components named 'Reference' and 'Secondary'.
|
||||
These components have properties named 'IMAGEFILE', 'LEADERFILE', and 'OUTPUT'
|
||||
with the values given in the above example.
|
||||
|
||||
NOTE: the capitalization of the property and component names are not of any
|
||||
importance. You could enter 'imagefile' instead of 'IMAGEFILE', for example,
|
||||
and it would work correctly. Also extra spaces in names that include spaces,
|
||||
such as "sensor name" do not matter.
|
||||
|
||||
There is a lot of flexibility provided by ISCE when constructing these input
|
||||
files through the use of "catalog" tags and "constant" tags.
|
||||
|
||||
A "catalog" tag can be used to indicate that the contents that would normally
|
||||
be found between an opening and closing "component" tag are defined in another
|
||||
xml file. For example, the stripmapApp.xml file shown above could have been split
|
||||
between three files as follows:
|
||||
|
||||
### stripmapApp.xml (Option 2)
|
||||
|
||||
```xml
|
||||
<stripmapApp>
|
||||
<component name="insar">
|
||||
<property name="Sensor name">ALOS</property>
|
||||
<component name="reference">
|
||||
<catalog>20070215.xml</catalog>
|
||||
</component>
|
||||
<component name="secondary">
|
||||
<catalog>20061231.xml</catalog>
|
||||
</component>
|
||||
</component>
|
||||
</stripmapApp>
|
||||
```
|
||||
|
||||
#### 20070215.xml
|
||||
|
||||
```xml
|
||||
<component name="Reference">
|
||||
<property name="IMAGEFILE">
|
||||
/a/b/c/20070215/IMG-HH-ALPSRP056480670-H1.0__A
|
||||
</property>
|
||||
<property name="LEADERFILE">
|
||||
/a/b/c/20070215/LED-ALPSRP056480670-H1.0__A
|
||||
</property>
|
||||
<property name="OUTPUT">20070215 </property>
|
||||
</component>
|
||||
```
|
||||
|
||||
#### 20061231.xml
|
||||
|
||||
```xml
|
||||
<component name="Secondary">
|
||||
<property name="IMAGEFILE">
|
||||
/a/b/c/20061231/IMG-HH-ALPSRP049770670-H1.0__A
|
||||
</property>
|
||||
<property name="LEADERFILE">
|
||||
/a/b/c/20061231/LED-ALPSRP049770670-H1.0__A
|
||||
</property>
|
||||
<property name="OUTPUT">20061231</property>
|
||||
</component>
|
||||
```
|
||||
### rtcApp.xml
|
||||
The inputs are Sentinel GRD zipfiles
|
||||
```xml
|
||||
<rtcApp>
|
||||
<constant name="dir">/Users/data/sentinel1 </constant>
|
||||
<component name="rtcApp">
|
||||
<property name="sensor name">sentinel1</property>
|
||||
<property name="posting">100</property>
|
||||
<property name="polarizations">[VV, VH]</property>
|
||||
<property name="epsg id">32618</property>
|
||||
<property name="geocode spacing">100</property>
|
||||
<property name="geocode interpolation method">bilinear</property>
|
||||
<property name="apply thermal noise correction">True</property>
|
||||
<component name="reference">
|
||||
<property name="safe">$dir$/rtcApp/data/S1A_IW_GRDH_1SDV_20181221T225104_20181221T225129_025130_02C664_B46C.zip</property>
|
||||
<property name="orbit directory">$dir$/orbits</property>
|
||||
<property name="output directory">$dir$/rtcApp/output</property>
|
||||
</component>
|
||||
</component>
|
||||
</rtcApp>
|
||||
```
|
||||
-----
|
||||
|
||||
## Component Configurability
|
||||
|
||||
In the examples for running stripmapApp.py ([Here](#running-isce-from-the-command-line) and [Here](#running-isce-in-the-python-interpreter) above) the input
|
||||
data were entered by giving the name of an 'xml' file on the command line. The
|
||||
ISCE framework parses that 'xml' file to assign values to the configurable
|
||||
variables in the isce Application stripmapApp.py. The Application executes
|
||||
several steps in its workflow. Each of those steps are handled by a Component
|
||||
that is also configurable from input data. Each component may be configured
|
||||
independently from user input using appropriately named and placed xml files.
|
||||
This section will explain how to name these xml files and where to place them.
|
||||
|
||||
### Component Names: Family and Instance
|
||||
|
||||
Each configurable component has two "names" associated with it. These names
|
||||
are used in locating possible configuration xml files for those components. The
|
||||
first name associated with a configurable component is its "family" name. For
|
||||
stripmapApp.py, the family name is "insar". Inside the stripmapApp.py file an
|
||||
Application is created from a base class named Insar. That base class defines
|
||||
the family name "insar" that is given to every instance created from it. The
|
||||
particular instance that is created in the file stripmapApp.py is given the
|
||||
'instance name' 'stripmapApp'. If you look in the file near the bottom you will
|
||||
see the line,
|
||||
|
||||
```python
|
||||
insar = Insar(name="stripmapApp")
|
||||
```
|
||||
|
||||
This line creates an instance of the class Insar (that is given the family name
|
||||
'insar' elsewhere in the file) and gives it the instance name "stripmapApp".
|
||||
|
||||
Other applications could be created that could make several different instances
|
||||
of the Insar. Each instance would have the family name "insar" and would be
|
||||
given a unique instance name. This is possible for every component. In the
|
||||
above example xml files instances named "Reference" and "Secondary" of a family named
|
||||
"alos" are created.
|
||||
|
||||
### Component Configuration Files: Locations, Names, Priorities
|
||||
|
||||
The ISCE framework looks for xml configuration files when configuring every
|
||||
Component in its flow in 3 different places with different priorities. The
|
||||
configuration sequence loads configuration parameters found in these xml files
|
||||
in the sequence lowest to highest priority overwriting any parameters defined
|
||||
as it moves up the priority sequence. This layered approach allows a couple
|
||||
of advantages. It allows the user to define common parameters for all instances
|
||||
in one file while defining specific instance parameters in files named for those
|
||||
specific instances. It also allows global preferences to be set in a special
|
||||
directory that will apply unless the user overrides them with a higher priority
|
||||
xml file.
|
||||
|
||||
The priority sequence has two layers. The first layer is location of the xml
|
||||
file and the second is the name of the file. Within each of the 3 location
|
||||
priorities indicated below, the filename priority goes from 'family name' to
|
||||
'instance name'. That is, within a given location priority level, a file
|
||||
named after the 'family name' is loaded first and then a file with the
|
||||
'instance name' is loaded next and overwrites any property values read from the
|
||||
'family name' file.
|
||||
|
||||
The priority sequence for location is as follows:
|
||||
|
||||
(1) The highest priority location is on the command line. On the command line
|
||||
the filename can be anything you choose. Configuration parameters can also be
|
||||
entered directly on the command line as in the following example:
|
||||
|
||||
```bash
|
||||
> stripmapApp.py insar.reference.output=reference_c.raw
|
||||
```
|
||||
|
||||
This example indicates that the variable named 'output' of the Component
|
||||
named 'reference' belonging to the Component (or Application) named 'insar'
|
||||
will be given the name "reference\_c.raw".
|
||||
|
||||
The priority sequence on the command line goes from lowest priority on the left
|
||||
to highest priority on the right. So, if we use the command line,
|
||||
|
||||
```bash
|
||||
> stripmapApp.py myInputFile.xml insar.reference.output=reference_c.raw
|
||||
```
|
||||
|
||||
where the myInputFile.xml file also gives a value for the insar reference output
|
||||
file as reference\_d.raw, then the one defined on the right will win, i.e.,
|
||||
reference\_c.raw.
|
||||
|
||||
(2) The next priority location is the local directory in which stripmapApp.py is
|
||||
executed. Any xml file placed in this directory named according to either the
|
||||
family name or the instance name for any configurable component in ISCE will be
|
||||
read while configuring the component.
|
||||
|
||||
(3) If you define an environment variable named ISCEDB, you can place xml files
|
||||
with family names or instance names that will be read when configuring
|
||||
Configurable Components. These files placed in the ISCEDB directory have the
|
||||
lowest priority when configuring properties of the Components. The files placed
|
||||
in the ISCEDB directory can be used to define global settings that will apply
|
||||
unless the xml files in the local directory or the command line override those
|
||||
preferences.
|
||||
|
||||
### Component Configuration Structure
|
||||
|
||||
However, the component tag has to have the family name of the Component/
|
||||
Application. In the above examples you see
|
||||
that the outermost component tag has the name "insar", which is the family name
|
||||
of the class Insar of which stripmapApp is an instance.
|
||||
|
||||
|
||||
### Component Configuration Help
|
||||
|
||||
At this time there is limited information about component configurability
|
||||
through the command
|
||||
|
||||
```bash
|
||||
> stripmapApp.py --help
|
||||
```
|
||||
|
||||
Future deliveries will improve this situation. In the meantime we describe
|
||||
here how to discover from the code which Components and parameters are
|
||||
configurable. One note of caution is that it is possible for a parameter
|
||||
to appear to be configurable from user input when the particular flow will
|
||||
not allow this degree of freedom. Experience and evolving documentation will
|
||||
be of use in determining these cases.
|
||||
|
||||
How to find out whether a component is configurable, what its configurable
|
||||
parameters are, what "name" to use in the xml file, and what name to give to
|
||||
the xml file.
|
||||
|
||||
Let's take as an example, Ampcor.py, which is in components/mroipac/ampcor.
|
||||
|
||||
Open it in an editor and search for the string "class Ampcor". It is on
|
||||
line 263. You will see that it inherits from Component. This is the minimum
|
||||
requirement for it to be a configurable component.
|
||||
|
||||
Now look above that line and you will see several variable names being set
|
||||
equal to a call to Component.Parameter. These declarations define these
|
||||
variables as configurable parameters. They are entered in the "parameter\_list"
|
||||
starting on line 268. That is the method by which these Parameters are made
|
||||
configurable parameters of the Component Ampcor.
|
||||
|
||||
Each of the parameters defines the "public\_name", which is the "name" that you
|
||||
would enter in the xml file. For instance if you want to set the gross offset
|
||||
in range, which is defined starting on line 88 in the variable
|
||||
ACROSS\_GROSS\_OFFSET, then you would use an xml tag like the following (assuming
|
||||
you have determined that the gross offset in range is about 150 pixels):
|
||||
|
||||
```xml
|
||||
<property name="ACROSS_GROSS_OFFSET">150</property>
|
||||
```
|
||||
|
||||
Now, to determine what to call the xml file and what "name" to use in the
|
||||
component tag. A configurable component has a "family" name and an instance
|
||||
"name". It is registered as having these names by calling the
|
||||
Component.\_\_init\_\_ constructor, which is done on line 806. On that line you
|
||||
will see that the call to \_\_init\_\_ passes 'family=self.class.family' and
|
||||
'name=name' to the Component constructor (super class of Ampcor). The family
|
||||
name is given as "ampcor" on line 265. The instance name is passed as the
|
||||
value of the 'name=name' and was passed to it from whatever program created it.
|
||||
Ampcor is created in components/isceobj/StripmapProc/runRefineSecondaryTiming.py where
|
||||
it is given the name 'reference_offset1' on line 35. If you are setting a parameter that
|
||||
should be the same for all uses of Ampcor, then you can use the
|
||||
family name 'ampcor' for the name of the xml file as 'ampcor.xml'. It is more
|
||||
likely that you will want to use the instance name 'reference\_offset1.xml'
|
||||
Use the family name 'ampcor' for the component tag 'name'.
|
||||
|
||||
Example for SLC matching use of Ampcor:
|
||||
|
||||
Filename: reference\_offset1.xml:
|
||||
|
||||
```xml
|
||||
<dummy>
|
||||
<component name="ampcor">
|
||||
<property name="ACROSS_GROSS_OFFSET">150</property>
|
||||
</component>
|
||||
</dummy>
|
||||
```
|
||||
|
||||
## User community forums
|
||||
|
||||
Read helpful information and participate in discussion with
|
||||
the user/developer community on GitHub Discussions:
|
||||
|
||||
https://github.com/isce-framework/isce2/discussions
|
||||
|
|
|
|||
|
|
@ -0,0 +1,253 @@
|
|||
#!/usr/bin/env python
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
if sys.version_info[0] == 2:
|
||||
print('Building with scons from python2')
|
||||
else:
|
||||
raw_input = input
|
||||
print('Building with scons from python3')
|
||||
|
||||
if 'SCONS_CONFIG_DIR' in os.environ:
|
||||
sconsConfigDir = os.environ['SCONS_CONFIG_DIR']
|
||||
else:
|
||||
print("Error. Need to set the variable SCONS_CONFIG_DIR in the shell environment")
|
||||
raise Exception
|
||||
|
||||
from configuration import sconsConfigFile
|
||||
#allow scons to take the input argument --setupfile=someOtherFile to allow change of the default SConfigISCE
|
||||
AddOption('--setupfile',dest='setupfile',type='string',default='SConfigISCE')
|
||||
AddOption('--isrerun',dest='isrerun',type='string',default='no')
|
||||
AddOption('--skipcheck',dest='skipcheck', action='store_true', default=False)
|
||||
|
||||
env = Environment(ENV = os.environ)
|
||||
sconsSetupFile = GetOption('setupfile')
|
||||
isrerun = GetOption('isrerun')
|
||||
skipcheck = GetOption('skipcheck')
|
||||
|
||||
sconsConfigFile.setupScons(env,sconsSetupFile)
|
||||
#add some information that are necessary to build the framework such as specific includes, libpath and so on
|
||||
buildDir = env['PRJ_SCONS_BUILD']
|
||||
libPath = os.path.join(buildDir,'libs')
|
||||
#this is the directory where all the built library are put so they can easily be found during linking
|
||||
env['PRJ_LIB_DIR'] = libPath
|
||||
|
||||
# add the libPath to the LIBPATH environment that is where all the libs are serched
|
||||
env.AppendUnique(LIBPATH = [libPath])
|
||||
# add the modPath to the FORTRANMODDIR environment that is where all the fortran mods are searched
|
||||
|
||||
#not working yet
|
||||
modPath = os.path.join(buildDir,'mods')
|
||||
env['FORTRANMODDIR'] = modPath
|
||||
env.AppendUnique(FORTRANPATH = [modPath])
|
||||
env.AppendUnique(F90PATH = [modPath])
|
||||
env.AppendUnique(F77PATH = [modPath])
|
||||
#add the includes needed by the framework
|
||||
imageApiInc = os.path.join(buildDir,'components/iscesys/ImageApi/include')
|
||||
dataCasterInc = os.path.join(buildDir,'components/iscesys/ImageApi/DataCaster/include')
|
||||
lineAccessorInc = os.path.join(buildDir,'components/isceobj/LineAccessor/include')
|
||||
stdOEInc = os.path.join(buildDir,'components/iscesys/StdOE/include')
|
||||
utilInc = os.path.join(buildDir,'components/isceobj/Util/include')
|
||||
utilLibInc = os.path.join(buildDir,'components/isceobj/Util/Library/include')
|
||||
|
||||
env.AppendUnique(CPPPATH = [imageApiInc,dataCasterInc,lineAccessorInc,stdOEInc,utilInc,utilLibInc])
|
||||
env['HELPER_DIR'] = os.path.join(env['PRJ_SCONS_INSTALL'],'helper')
|
||||
env['HELPER_BUILD_DIR'] = os.path.join(env['PRJ_SCONS_BUILD'],'helper')
|
||||
|
||||
#put the pointer function createHelp in the environment so it can be access anywhere
|
||||
from configuration.buildHelper import createHelp
|
||||
env['HELP_BUILDER'] = createHelp
|
||||
#Create an env variable to hold all the modules added to the sys.path by default.
|
||||
#They are the same as the one in in __init__.py in the same directory of this file
|
||||
moduleList = []
|
||||
installDir = env['PRJ_SCONS_INSTALL']
|
||||
moduleList.append(os.path.join(installDir,'applications'))
|
||||
moduleList.append(os.path.join(installDir,'components'))
|
||||
env['ISCEPATH'] = moduleList
|
||||
env.PrependUnique(LIBS=['gdal'])
|
||||
Export('env')
|
||||
|
||||
|
||||
inst = env['PRJ_SCONS_INSTALL']
|
||||
|
||||
####new part
|
||||
#####PSA. Check for header files and libraries up front
|
||||
confinst = Configure(env)
|
||||
hdrparams = [('python3 header', 'Python.h', 'Install python3-dev or add path to Python.h to CPPPATH'),
|
||||
('fftw3', 'fftw3.h', 'Install fftw3 or libfftw3-dev or add path to fftw3.h to CPPPATH and FORTRANPATH'),
|
||||
('hdf5', 'hdf5.h', 'Install HDF5 of libhdf5-dev or add path to hdf5.h to CPPPATH'),
|
||||
('X11', 'X11/Xlib.h', 'Install X11 or libx11-dev or add path to X11 directory to X11INCPATH'),
|
||||
('Xm', 'Xm/Xm.h', 'Install libXm or libXm-dev or add path to Xm directory to MOTIFINCPATH'),
|
||||
('openmp', 'omp.h', 'Compiler not built with OpenMP. Use a different compiler or add path to omp.h to CPPPATH'),]
|
||||
|
||||
allflag = False
|
||||
for (name,hname,msg) in hdrparams:
|
||||
if not (confinst.CheckCHeader(hname) or confinst.CheckCXXHeader(hname)):
|
||||
print('Could not find: {0} header for {1}'.format(hname, name))
|
||||
print('Error: {0}'.format(msg))
|
||||
allflag = True
|
||||
|
||||
libparams= [('libhdf5', 'hdf5', 'Install hdf5 or libhdf5-dev'),
|
||||
('libfftw3f', 'fftw3f', 'Install fftw3 or libfftw3-dev'),
|
||||
('libXm', 'Xm', 'Install Xm or libXm-dev'),
|
||||
('libXt', 'Xt', 'Install Xt or libXt-dev')]
|
||||
|
||||
for (name,hname,msg) in libparams:
|
||||
if not confinst.CheckLib(hname):
|
||||
print('Could not find: {0} lib for {1}'.format(hname, name))
|
||||
print('Error: {0}'.format(msg))
|
||||
allflag = True
|
||||
|
||||
if env.FindFile('fftw3.f', env['FORTRANPATH']) is None:
|
||||
print('Checking for F include fftw3 ... no')
|
||||
print('Could not find: fftw3.f header for fftw3')
|
||||
print('Error: Install fftw3 or libfftw3-dev or add path to FORTRANPATH')
|
||||
allflag = True
|
||||
else:
|
||||
print('Checking for F include fftw3 ... yes'.format(name))
|
||||
|
||||
|
||||
###This part added to handle GDAL and C++11
|
||||
gdal_version = os.popen('gdal-config --version').read()
|
||||
print('GDAL version: {0}'.format(gdal_version))
|
||||
try:
|
||||
gdal_majorversion = int(gdal_version.split('.')[0])
|
||||
gdal_subversion = int(gdal_version.split('.')[1])
|
||||
except:
|
||||
raise Exception('gdal-config not found. GDAL does not appear to be installed ... cannot proceed. If you have installed gdal, ensure that you have path to gdal-config in your environment')
|
||||
|
||||
env['GDALISCXX11'] = None
|
||||
if (gdal_majorversion > 2) or (gdal_subversion >= 3):
|
||||
env['GDALISCXX11'] = 'True'
|
||||
|
||||
|
||||
##Add C++11 for GDAL checks
|
||||
#Save default environment if C++11
|
||||
if env['GDALISCXX11']:
|
||||
preCXX11 = confinst.env['CXXFLAGS']
|
||||
confinst.env.Replace(CXXFLAGS=preCXX11 + ['-std=c++11'])
|
||||
|
||||
if not confinst.CheckCXXHeader('gdal_priv.h'):
|
||||
print('Could not find: gdal_priv.h for gdal')
|
||||
print('Install gdal or add path to gdal includes to CPPPATH')
|
||||
allflag = True
|
||||
|
||||
if not confinst.CheckLib('gdal'):
|
||||
print('Could not find: libgdal for gdal')
|
||||
print('Install gdal or include path to libs to LIBPATH')
|
||||
allflag = True
|
||||
|
||||
###If C++11, revert to original environment
|
||||
if env['GDALISCXX11']:
|
||||
confinst.env.Replace(CXXFLAGS=preCXX11)
|
||||
|
||||
|
||||
###Decide whether to complain or continue
|
||||
if (allflag and not skipcheck):
|
||||
print('Not all components of ISCE will be installed and can result in errors.')
|
||||
raw_input('Press Enter to continue.... Ctrl-C to exit')
|
||||
elif (allflag and skipcheck):
|
||||
print('Not all components of ISCE will be installed and can result in errors.')
|
||||
print('User has requested to skip checks. Expect failures ... continuing')
|
||||
else:
|
||||
print('Scons appears to find everything needed for installation')
|
||||
|
||||
try:
|
||||
# Older versions of scons do not have CheckProg, so 'try' to use it
|
||||
if confinst.CheckProg('cython3'):
|
||||
env['CYTHON3'] = True
|
||||
else:
|
||||
print('cython3 is not installed. Packages that depend on cython3 will not be installed.')
|
||||
env['CYTHON3'] = False
|
||||
except:
|
||||
# If CheckProg is not available set env['CYTHON3'] = True and hope for the best
|
||||
# If the cython3 link does not exist, then a later error should prompt the user to
|
||||
# create the cython3 link to their cython installed as cython.
|
||||
env['CYTHON3'] = True
|
||||
pass
|
||||
|
||||
env = confinst.Finish()
|
||||
###End of new part
|
||||
|
||||
### GPU branch-specific modifications
|
||||
if 'ENABLE_CUDA' in env and env['ENABLE_CUDA'].upper() == 'TRUE':
|
||||
print('User requested compilation with CUDA, if available')
|
||||
try:
|
||||
env.Tool('cuda', toolpath=['scons_tools'])
|
||||
env['GPU_ACC_ENABLED'] = True
|
||||
print("CUDA-relevant libraries and toolkit found. GPU acceleration may be enabled.")
|
||||
except:
|
||||
env['GPU_ACC_ENABLED'] = False
|
||||
print("CUDA-relevant libraries or toolkit not found. GPU acceleration will be disabled.")
|
||||
else:
|
||||
print('User did not request CUDA support. Add ENABLE_CUDA = True to SConfigISCE to enable CUDA support')
|
||||
env['GPU_ACC_ENABLED'] = False
|
||||
|
||||
### End of GPU branch-specific modifications
|
||||
|
||||
|
||||
env.Install(inst, '__init__.py')
|
||||
env.Install(inst, 'release_history.py')
|
||||
|
||||
if not os.path.exists(inst):
|
||||
os.makedirs(inst)
|
||||
|
||||
v = 0
|
||||
if isrerun == 'no':
|
||||
cmd = 'scons -Q install --isrerun=yes'
|
||||
if skipcheck:
|
||||
cmd += ' --skipcheck'
|
||||
v = os.system(cmd)
|
||||
if v == 0:
|
||||
env.Alias('install',inst)
|
||||
applications = os.path.join('applications','SConscript')
|
||||
SConscript(applications)
|
||||
components = os.path.join('components','SConscript')
|
||||
SConscript(components)
|
||||
defaults = os.path.join('defaults','SConscript')
|
||||
SConscript(defaults)
|
||||
library = os.path.join('library','SConscript')
|
||||
SConscript(library)
|
||||
contrib = os.path.join('contrib','SConscript')
|
||||
SConscript(contrib)
|
||||
|
||||
if 'test' in sys.argv:
|
||||
#Run the unit tests
|
||||
env['Test'] = True
|
||||
else:
|
||||
#Don't run tests.
|
||||
#This option only installs test support package for future test runs.
|
||||
env['Test'] = False
|
||||
|
||||
tests = os.path.join('test', 'SConscript')
|
||||
SConscript(tests)
|
||||
|
|
@ -0,0 +1,61 @@
|
|||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
from .release_history import release_version, release_svn_revision, release_date

# Backwards-compatible aliases for the release metadata.
svn_revision = release_svn_revision
# BUGFIX: this previously read `version = release_history`, a NameError --
# `release_history` is the module name, not an imported symbol.
version = release_version  # compatibility alias

__version__ = release_version

import sys, os
# Absolute path of the installed isce package; used as the root for
# sub-package paths and as the default ISCE_HOME.
isce_path = os.path.dirname(os.path.abspath(__file__))

import logging
from logging.config import fileConfig as _fc
_fc(os.path.join(isce_path, 'defaults', 'logging', 'logging.conf'))

# Make the legacy flat-layout sub-directories importable.
sys.path.insert(1, isce_path)
sys.path.insert(1, os.path.join(isce_path, 'applications'))
sys.path.insert(1, os.path.join(isce_path, 'components'))
sys.path.insert(1, os.path.join(isce_path, 'library'))

# Respect a user-provided ISCE_HOME; otherwise point it at the package itself.
try:
    os.environ['ISCE_HOME']
except KeyError:
    print('Using default ISCE Path: %s'%(isce_path))
    os.environ['ISCE_HOME'] = isce_path

try:
    from . license import stanford_license
except Exception:
    # Narrowed from a bare `except:`; the licensed (Stanford) add-on is
    # optional, so any failure to import it falls back to the open-source build.
    print("This is the Open Source version of ISCE.")
    print("Some of the workflows depend on a separate licensed package.")
    print("To obtain the licensed package, please make a request for ISCE")
    print("through the website: https://download.jpl.nasa.gov/ops/request/index.cfm.")
    print("Alternatively, if you are a member, or can become a member of WinSAR")
    print("you may be able to obtain access to a version of the licensed sofware at")
    print("https://winsar.unavco.org/software/isce")
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
# List of ISCE2 application entry-point scripts to install.
set(files
    __init__.py
    alos2App.py
    alos2burstApp.py
    dataTileManager.py
    DEM2ISCE.py
    dem.py
    demdb.py
    downsampleDEM.py
    fixImageXml.py
    gdal2isce_xml.py
    imageMath.py
    insarApp.py
    isce2geotiff.py
    isce2gis.py
    isceApp.py
    iscehelp.py
    looks.py
    make_raw.py
    mdx.py
    rtcApp.py
    stitcher.py
    stripmapApp.py
    topsApp.py
    upsampleDem.py
    waterMask.py
    wbdStitcher.py
    wbd.py
    )

# PROGRAMS (not FILES) so the scripts keep their executable bit.
install(PROGRAMS ${files}
        DESTINATION ${ISCE2_PKG}/applications)

# Symlink apps into PREFIX/bin so they are on the $PATH
# Ensure the bin directory exists at install time (it may not on a fresh prefix).
install(CODE "execute_process(COMMAND ${CMAKE_COMMAND} -E make_directory \
                                      ${CMAKE_INSTALL_FULL_BINDIR})"
        )

# One symlink per application, pointing back into the installed package.
foreach(file ${files})
    install(CODE "execute_process(COMMAND ${CMAKE_COMMAND} -E create_symlink \
                                          ${ISCE2_PKG_FULL}/applications/${file} \
                                          ${CMAKE_INSTALL_FULL_BINDIR}/${file})"
            )
endforeach()
|
||||
|
|
@ -0,0 +1,72 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Walter Szeliga
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import math
|
||||
from isce import logging
|
||||
from iscesys.Compatibility import Compatibility
|
||||
Compatibility.checkPythonVersion()
|
||||
from isceobj.Location.Peg import Peg
|
||||
from iscesys.Component.FactoryInit import FactoryInit
|
||||
|
||||
class CalculatePegPoint(FactoryInit):
    """Estimate a peg point (latitude, longitude, heading) for a SAR scene.

    The peg is placed at the orbit nadir position at mid-swath, with the
    heading taken from mid-swath toward the end of the scene.
    """

    def calculatePegPoint(self):
        """Parse the raw data and log the estimated peg point."""
        self.logger.info("Parsing Raw Data")
        self.sensorObj.parse()
        frame = self.sensorObj.getFrame()

        # Orbit nadir locations at mid-swath and at the end of the scene.
        orbit = self.sensorObj.getFrame().getOrbit()
        mid_xyz = orbit.interpolateOrbit(frame.getSensingMid())
        end_xyz = orbit.interpolateOrbit(frame.getSensingStop())

        # Satellite heading from the mid-point toward the end of the scene.
        ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp()
        mid_llh = ellipsoid.xyz_to_llh(mid_xyz.getPosition())
        end_llh = ellipsoid.xyz_to_llh(end_xyz.getPosition())
        heading = ellipsoid.geo_hdg(mid_llh, end_llh)

        # Build the peg point from the mid-swath position and heading.
        peg = Peg(latitude=mid_llh[0], longitude=mid_llh[1],
                  heading=heading, ellipsoid=ellipsoid)
        self.logger.info("Peg Point:\n%s" % peg)

    def __init__(self, arglist):
        """Wire up the component factory, the Sensor component and the logger."""
        FactoryInit.__init__(self)
        self.initFactory(arglist)
        self.sensorObj = self.getComponent('Sensor')
        self.logger = logging.getLogger('isce.calculatePegPoint')
|
||||
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
if (len(sys.argv) < 2):
|
||||
print("Usage:%s <xml-parameter file>" % sys.argv[0])
|
||||
sys.exit(1)
|
||||
runObj = CalculatePegPoint(sys.argv[1:])
|
||||
runObj.calculatePegPoint()
|
||||
|
|
@ -0,0 +1,172 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# LAMP License
|
||||
#
|
||||
# Author: chenzenghui
|
||||
# time: 2023.06.04
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
# 自定义dem管理
|
||||
# 1. 创建二进制文件 ENVI hdr
|
||||
# 2. 读取文件构建.vrt .xml
|
||||
|
||||
import argparse
|
||||
import isce
|
||||
from ctypes import cdll, c_char_p, c_int, byref
|
||||
from array import array
|
||||
import struct
|
||||
import zipfile
|
||||
import os
|
||||
import sys
|
||||
import math
|
||||
import urllib.request, urllib.parse, urllib.error
|
||||
from isce import logging
|
||||
from iscesys.Component.Component import Component
|
||||
from isceobj.Image import createDemImage
|
||||
from osgeo import gdal,osr,ogr
|
||||
import xml.etree.ElementTree as ET
|
||||
from html.parser import HTMLParser
|
||||
import time
|
||||
|
||||
|
||||
|
||||
class DEM2ISCE(Component):
    """Convert GeoTIFF DEM file(s) into the flat-binary ENVI format used by
    ISCE and generate the matching ISCE .xml metadata.

    Workflow: stitchDems() warps/mosaics the inputs to a WGS84 ENVI raster
    via GDAL, then writes an ISCE DemImage XML header for it.
    """

    def dem_merged(self,in_dem_path, out_dem_path):
        '''
        Mosaic/resample the input DEM(s) into a single ENVI-format raster
        in the WGS84 geographic coordinate system (units of degrees).

        args:
            in_dem_path: input DEM path(s), passed straight to gdal.Warp
                         (a single path or a list of paths)
            out_dem_path: output DEM file path
        returns:
            the output DEM path
        '''
        # Inputs are forwarded as-is to gdal.Warp; the directory-scanning
        # variant was disabled.
        dem_file_paths=in_dem_path#[os.path.join(in_dem_path,dem_name) for dem_name in os.listdir(in_dem_path) if dem_name.find(".tif")>=0 and dem_name.find(".tif.")==-1]
        spatialreference=osr.SpatialReference()
        spatialreference.SetWellKnownGeogCS("WGS84")  # geographic coordinates, units of degrees
        spatialproj=spatialreference.ExportToWkt()  # export the projection as WKT
        # Mosaic all inputs into one large ENVI raster; cells with no data
        # are filled with the configured NoData value.
        out_DEM=out_dem_path
        gdal.Warp(out_DEM,
                  dem_file_paths,
                  format="ENVI",
                  dstSRS=spatialproj,
                  dstNodata=self._NoDataValue,
                  outputType=gdal.GDT_Float32)
        # NOTE(review): presumably gives GDAL time to flush the output to
        # disk before the metadata step -- confirm this sleep is needed.
        time.sleep(3)
        return out_DEM

    #this method also create an actual DemImage object that is returned by the getImage() method
    def createXmlMetadata(self,outname):
        """Create the ISCE DemImage for `outname` and render its .xml header."""
        demImage = self.createImage(outname)
        demImage.renderHdr()

    def getDemWidth(self,outname):
        """Return the raster width (samples) of `outname` via GDAL."""
        gdal.AllRegister()
        dataset=gdal.Open(outname)
        width=dataset.RasterXSize
        del dataset
        return width

    def getDemHeight(self,outname):
        """Return the raster height (lines) of `outname` via GDAL."""
        gdal.AllRegister()
        dataset=gdal.Open(outname)
        height=dataset.RasterYSize
        del dataset
        return height

    def getGeotransform(self,outname):
        """Return the 6-element GDAL geotransform of `outname`."""
        gdal.AllRegister()
        dataset=gdal.Open(outname)
        geotransform = dataset.GetGeoTransform()
        del dataset
        return geotransform

    def createImage(self,outname):
        """Build and initialize an ISCE DemImage describing `outname`.

        The image geometry (size, starting coordinates, pixel spacing) is
        taken from the raster's GDAL geotransform; the vertical reference
        is self._reference. Caches the image on self._image and returns it.
        """
        demImage = createDemImage()
        width = self.getDemWidth(outname)
        height=self.getDemHeight(outname)
        demImage.initImage(outname,'write',width,type="float")
        length = demImage.getLength()
        # Pixel spacing / origin from the geotransform.
        geotransform=self.getGeotransform(outname)

        dictProp = {'METADATA_LOCATION':outname+'.xml','REFERENCE':self._reference,'Coordinate1':{'size':width,'startingValue':geotransform[0],'delta':geotransform[1]},'Coordinate2':{'size':length,'startingValue':geotransform[3],'delta':geotransform[5]},'FILE_NAME':outname}
        #no need to pass the dictionaryOfFacilities since init will use the default one
        demImage.init(dictProp)
        self._image = demImage
        return demImage

    def setFillingValue(self,val):
        """Set the value used to fill voids in the DEM."""
        self._fillingValue = val

    def setNoDataValue(self,val):
        """Set the NoData value applied by gdal.Warp during merging."""
        self._NoDataValue = val


    def stitchDems(self,source, outname):
        """Top-level conversion: merge `source` DEM(s) to `outname`, then
        write the ISCE XML metadata for the result."""
        import glob
        # Merge the data.
        self.dem_merged(source, outname)
        self.createXmlMetadata(outname)
    # Default component family name.
    family = 'DEM2ISCE'
    def __init__(self,family = '', name = ''):
        # Input file extensions handled.
        self._extension = '.tif'
        self._zip = '.zip'
        #to make it working with other urls, make sure that the second part of the url
        #it's /srtm/version2_1/SRTM(1,3)
        self._filters = {'region1':['Region'],'region3':['Africa','Australia','Eurasia','Islands','America'],'fileExtension':['.hgt.zip']}
        self._remove = ['.jpg']
        self._metadataFilename = 'fileDem.dem'
        self._createXmlMetadata = None
        self._createRscMetadata = None
        self._regionList = {'1':[],'3':[]}
        ##self._keepDems = False
        self._fillingFilename = 'filling.hgt' # synthetic tile to cover holes
        ##self._fillingValue = -32768 # fill the synthetic tile with this value
        ##self._noFilling = False
        self._failed = 'failed'
        self._succeded = 'succeded'
        # Cached DemImage produced by createImage(); see getImage().
        self._image = None
        # Vertical reference recorded in the output metadata.
        self._reference = 'EGM96'

        super(DEM2ISCE, self).__init__(family if family else  self.__class__.family, name=name)
        # logger not defined until baseclass is called
        if not self.logger:
            self.logger = logging.getLogger('isce.contrib.demUtils.DEM2ISCE')

    def getImage(self):
        """Return the DemImage created by the last createImage() call (or None)."""
        return self._image
|
||||
|
||||
|
||||
# DEM转换主流程
|
||||
# DEM conversion driver: configure a DEM2ISCE component and run the conversion.
def processDEM2ISCE(name,source_path,target_path,fillvalue,noDataValue):
    """Convert the DEM(s) at `source_path` into ISCE format at `target_path`."""
    converter = DEM2ISCE(name=name)
    # Configure void-fill and NoData handling before stitching.
    converter.setFillingValue(fillvalue)
    converter.setNoDataValue(noDataValue)
    converter.stitchDems(source_path, target_path)
|
||||
|
||||
|
||||
def main():
    """Command-line entry point: convert a GeoTIFF DEM to ISCE format.

    Returns 0 on success so that ``sys.exit(main())`` reports success to the
    shell. (BUGFIX: the original returned -1 unconditionally, which made
    every run -- including successful ones -- exit with a failure status.)
    """
    #if not argument provided force the --help flag
    if(len(sys.argv) == 1):
        sys.argv.append('-h')
    # Use the epilog to add usage examples
    epilog = '将格式为tif 的DEM 转换为ISCE 支持的DEM格式:\n\n'
    epilog += 'Usage examples:\n\n'
    epilog += 'DEM2ISCE.py -s /mnt/d/codestorage/isce2/青海省.tif -o /mnt/d/codestorage/isce2/青海省_wgs84 -fillvalue -9999 -Nodata -9999\n\n'

    #set the formatter_class=argparse.RawDescriptionHelpFormatter otherwise it splits the epilog lines with its own default format
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,epilog=epilog)
    parser.add_argument('-s', '--source', type = str, default ="/mnt/d/codestorage/isce2/青海省.tif", dest = 'source_path', help = '输入dem,格式为tif')
    parser.add_argument('-o', '--outpath', type = str, default = '/mnt/d/codestorage/isce2/青海省_wgs84', dest = 'outpath', help = '输出isce 支持的DEM ')
    parser.add_argument('-fillvalue', '--fillvalue', type = float, default = -9999, dest = 'fillvalue', help = '空值填充')
    parser.add_argument('-Nodata', '--Nodata', type = float, default = -9999, dest = 'Nodatavalue', help = '无效值填充')
    args = parser.parse_args()
    processDEM2ISCE("DEM2ISCE",args.source_path,args.outpath,args.fillvalue,args.Nodatavalue)
    return 0

if __name__ == '__main__':
    sys.exit(main())
|
||||
|
|
@ -0,0 +1,228 @@
|
|||
#!/usr/bin/env python3
|
||||
import ast
import io
import os
# BUGFIX: `import ConfigParser` is Python 2 only and crashes under this
# file's python3 shebang; the py3 module is `configparser`.
import configparser as CP
from collections import OrderedDict
from datetime import datetime, time

import numpy as np
from lxml import objectify as OB

import isce
from isceobj import Constants as Cn
|
||||
|
||||
xmlTimeFormat = '%Y-%m-%d %H:%M:%S.%f'
|
||||
class insarProcXML(object):
    '''
    Read in the metadata files generated by ISCE and create ROI-PAC equivalents.
    '''

    def __init__(self, xmlfile='insarProc.xml'):
        '''Load and parse the insarProc XML metadata file.'''
        self.xmlfile = xmlfile
        # `with` guarantees the handle is closed even if parsing raises.
        with open(self.xmlfile) as fin:
            self.xml = OB.fromstring(fin.read())

    def raw_rsc(self, key=None, write=False):
        '''Build (and optionally write) the ROI-PAC .rsc dictionary for raw data.

        key   -- 'reference' or 'secondary' (the scene to convert)
        write -- when True, also write "<sensor OUTPUT>.rsc" to disk
        Returns the OrderedDict of RSC keys/values.
        '''

        if key not in ['reference', 'secondary']:
            raise ValueError('Raw Files can only be written for reference or secondary.')

        rsc = OrderedDict()

        ######Sequence similar to Envisat's raw.rsc file
        rsc['FIRST_FRAME'] = 0

        #####Get Scene time
        root = getattr(self.xml, key)
        frame = root.frame
        sensmid = datetime.strptime(frame.SENSING_MID.text, xmlTimeFormat)
        sensstart = datetime.strptime(frame.SENSING_START.text, xmlTimeFormat)
        sensstop = datetime.strptime(frame.SENSING_STOP.text, xmlTimeFormat)

        # NOTE(review): '{0:2d}' space-pads the millisecond field to width 2;
        # ROI-PAC may expect zero-padded 3 digits ('{0:03d}') -- confirm.
        rsc['FIRST_FRAME_SCENE_CENTER_TIME'] = sensmid.strftime('%Y%m%d%H%M%S') + '{0:2d}'.format(int(sensmid.microsecond/1000.))

        rsc['FIRST_FRAME_SCENE_CENTER_LINE'] = 0
        rsc['DATE'] = sensmid.strftime('%y%m%d')
        rsc['FIRST_LINE_YEAR'] = sensstart.strftime('%Y')
        rsc['FIRST_LINE_MONTH_OF_YEAR'] = sensstart.strftime('%m')
        rsc['FIRST_LINE_DAY_OF_MONTH'] = sensstart.strftime('%d')
        rsc['FIRST_CENTER_HOUR_OF_DAY'] = sensmid.strftime('%H')
        rsc['FIRST_CENTER_MN_OF_HOUR'] = sensmid.strftime('%M')
        rsc['FIRST_CENTER_S_OF_MN'] = sensmid.strftime('%S')
        rsc['FIRST_CENTER_MS_OF_S'] = int(round(sensmid.microsecond/1000.))

        rsc['PROCESSING_FACILITY'] = frame.PROCESSING_FACILITY.text
        rsc['PROCESSING_SYSTEM'] = frame.PROCESSING_SYSTEM.text
        rsc['PROCESSING_SYSTEM_VERSION'] = frame.PROCESSING_SYSTEM_VERSION.text

        ######Platform information.
        instrument = root.instrument
        platform = "[platform]\n" + instrument.PLATFORM.text
        # BUGFIX (py2 -> py3): the PLATFORM text carries literal backslash
        # escapes; py2 used str.decode('string_escape'). Expand them, then
        # parse as INI with read_string (py3 replacement for
        # readfp(io.BytesIO(...)), which required bytes and a removed API).
        platform = platform.encode('latin-1').decode('unicode_escape')
        temp = CP.RawConfigParser()
        temp.read_string(platform)
        # Values are stored quoted; strip the surrounding quote characters.
        rsc['PLATFORM'] = temp.get('platform','Mission')[1:-1]
        rsc['ANTENNA_LENGTH'] = temp.get('platform', 'Antenna Length')[1:-1]
        rsc['ANTENNA_SIDE'] = temp.get('platform', 'Look Direction')[1:-1]

        del temp
        rsc['ORBIT_NUMBER'] = frame.ORBIT_NUMBER.text
        rsc['STARTING_RANGE'] = frame.STARTING_RANGE.text
        rsc['ONE_WAY_DELAY'] = None #Undefined
        # NOTE(review): this stores the raw speed of light; a range *pixel
        # size* would normally be c/(2 * range sampling rate) -- confirm.
        rsc['RANGE_PIXEL_SIZE'] = Cn.SPEED_OF_LIGHT

        rsc['PRF'] = instrument.PRF.text
        rsc['FILE_LENGTH'] = int(frame.NUMBER_OF_LINES.text)
        rsc['WIDTH'] = int(frame.NUMBER_OF_SAMPLES.text)
        rsc['YMIN'] = 0
        rsc['YMAX'] = rsc['FILE_LENGTH']
        rsc['XMIN'] = 0 #Assuming no prior header bytes
        rsc['XMAX']= rsc['WIDTH']
        rsc['RANGE_SAMPLING_FREQUENCY'] = instrument.RANGE_SAMPLING_RATE.text

        #####Get planet description
        planet = self.xml.planet
        rsc['PLANET_GM'] = planet.GM.text
        rsc['PLANET_SPINRATE'] = planet.SPINRATE.text

        # Seconds-of-day for the first, center and last lines.
        temp = sensstart - datetime.combine(sensstart.date(), time(0))
        rsc['FIRST_LINE_UTC'] = temp.total_seconds()

        temp = sensmid - datetime.combine(sensmid.date(), time(0))
        rsc['CENTER_LINE_UTC'] = temp.total_seconds()

        temp = sensstop - datetime.combine(sensstop.date(), time(0))
        rsc['LAST_LINE_UTC'] = temp.total_seconds()

        root1 = getattr(self.xml.runEstimateHeights, 'CHV_'+key)
        rsc['HEIGHT'] = root1.outputs.HEIGHT.text
        rsc['VELOCITY'] = root1.outputs.VELOCITY.text

        rsc['HEIGHT_DT'] = None #Undefined
        rsc['LATITUDE'] = None #Undefined
        rsc['LONGITUDE'] = None #Undefined
        rsc['EQUATORIAL_RADIUS'] = planet.ellipsoid.SEMIMAJOR_AXIS.text
        rsc['ECCENTRICITY_SQUARED'] = planet.ellipsoid.ECCENTRICITY_SQUARED.text
        rsc['EARTH_RADIUS'] = None
        rsc['FILE_START'] = 1
        rsc['WAVELENGTH'] = instrument.RADAR_WAVELENGTH.text
        rsc['PULSE_LENGTH'] = instrument.RANGE_PULSE_DURATION.text
        rsc['CHIRP_SLOPE'] = instrument.CHIRP_SLOPE.text
        rsc['I_BIAS'] = root.iBias.text
        rsc['Q_BIAS'] = root.qBias.text
        rsc['DOPPLER_RANGE0'] = None
        rsc['DOPPLER_RANGE1'] = None
        rsc['DOPPLER_RANGE2'] = None
        rsc['DOPPLER_RANGE3'] = None
        rsc['SQUINT'] = None #Could be 0. never used
        rsc['ROI_PAC_VERSION'] = 3

        if write:
            # BUGFIX: use .text (as slc_rsc does) rather than concatenating
            # the objectify element itself with a str.
            outfilename = root.sensor.OUTPUT.text + '.rsc'
            with open(outfilename, 'w') as fid:
                # BUGFIX (py2 -> py3): dict.iteritems() no longer exists.
                for kk, vv in rsc.items():
                    fid.write('{0:<40} {1:<40}\n'.format(kk,vv))

        return rsc


    def slc_rsc(self, key=None, raw=None, write=False):
        '''
        Create rsc files for all the interferograms generated by ISCE.

        key   -- 'reference' or 'secondary'
        raw   -- a previously built raw rsc dict (rebuilt here when None)
        write -- when True, also write "<OUTPUT base>.slc.rsc" to disk
        Returns the OrderedDict of RSC keys/values.
        '''

        if key not in ['reference', 'secondary']:
            raise ValueError('SLC files can only be written for reference or secondary.')

        # Start from the raw-data rsc and overwrite the SLC-specific fields.
        if raw is None:
            rsc = self.raw_rsc(key=key, write=False)
        else:
            rsc = raw

        root = getattr(self.xml, key)
        rootslc = getattr(self.xml.runFormSLC, key)

        #####Values that have changed.
        rsc['RAW_DATA_RANGE'] = rsc['STARTING_RANGE']
        rsc['STARTING_RANGE'] = rootslc.outputs.STARTING_RANGE.text
        rsc['FILE_LENGTH'] = None #Needs to be output
        rsc['WIDTH'] = int(rootslc.outputs.SLC_WIDTH.text)
        rsc['XMIN'] = 0
        rsc['XMAX'] = rsc['WIDTH']
        rsc['YMIN'] = 0
        rsc['YMAX'] = None
        rsc['FIRST_LINE_UTC'] = None
        rsc['CENTER_LINE_UTC'] = None
        rsc['LAST_LINE_UTC'] = None
        rsc['HEIGHT'] = rootslc.inputs.SPACECRAFT_HEIGHT.text
        rsc['HEIGHT_DT'] = None
        rsc['VELOCITY'] = rootslc.inputs.BODY_FIXED_VELOCITY.text
        rsc['LATITUDE'] = None
        rsc['LONGITUDE'] = None
        #rsc['HEADING'] = float(self.xml.getpeg.outputs.PEG_HEADING)*180.0/np.pi
        rsc['HEADING'] = None #Verify the source
        rsc['EARTH_RADIUS'] = rootslc.inputs.PLANET_LOCAL_RADIUS.text
        # Doppler polynomial coefficients are stored as a list literal.
        dop = ast.literal_eval(rootslc.inputs.DOPPLER_CENTROID_COEFFICIENTS.text)
        rsc['DOPPLER_RANGE0'] = dop[0]
        rsc['DOPPLER_RANGE1'] = None #Check units per meter / per pixel
        rsc['DOPPLER_RANGE2'] = None
        rsc['DOPPLER_RANGE3'] = None

        rsc['DELTA_LINE_UTC'] = None
        rsc['AZIMUTH_PIXEL_SIZE'] = None
        rsc['RANGE_PIXEL_SIZE'] = None
        rsc['RANGE_OFFSET'] = None
        rsc['RLOOKS'] = 1
        rsc['ALOOKS'] = 1
        rsc['PEG_UTC'] = 1
        rsc['HEIGHT_DS'] = None
        rsc['HEIGHT_DDS'] = None
        rsc['CROSSTRACK_POS'] = None
        rsc['CROSSTRACK_POS_DS'] = None
        rsc['CROSSTRACK_POS_DDS'] = None
        rsc['VELOCITY_S'] = None
        rsc['VELOCITY_C'] = None
        rsc['VELOCITY_H'] = None
        rsc['ACCELERATION_S'] = None
        rsc['ACCELERATION_C'] = None
        rsc['ACCELERATION_H'] = None
        rsc['VERT_VELOCITY'] = None
        rsc['VERT_VELOCITY_DS'] = None
        rsc['CROSSTRACK_VELOCITY'] = None
        rsc['CROSSTRACK_VELOCITY_DS'] = None
        rsc['ALONGTRACK_VELOCITY'] = None
        rsc['ALONGTRACK_VELOCITY_DS'] = None
        rsc['PEG_UTC'] = None
        rsc['SQUINT'] = None

        if write:
            outfilename = os.path.splitext(root.sensor.OUTPUT.text)[0]+'.slc.rsc'
            with open(outfilename, 'w') as fid:
                # BUGFIX (py2 -> py3): dict.iteritems() no longer exists.
                for kk, vv in rsc.items():
                    fid.write('{0:<40} {1:<40}\n'.format(kk,vv))

        return rsc
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Exercise the converter on the default insarProc.xml in the cwd:
    # raw .rsc files for both scenes first, then the SLC .rsc files.
    converter = insarProcXML()

    raw = {scene: converter.raw_rsc(key=scene, write=True)
           for scene in ('reference', 'secondary')}

    for scene in ('reference', 'secondary'):
        converter.slc_rsc(raw=raw[scene], key=scene, write=True)
|
||||
|
|
@ -0,0 +1,266 @@
|
|||
#!/usr/bin/env python3
|
||||
from __future__ import print_function
|
||||
import argparse
|
||||
import isce
|
||||
from make_raw import makeRawApp
|
||||
import numpy as np
|
||||
import os
|
||||
import itertools
|
||||
from isceobj.XmlUtil.XmlUtil import XmlUtil
|
||||
from isceobj.Orbit.Orbit import Orbit, StateVector
|
||||
from iscesys.StdOEL.StdOELPy import create_writer
|
||||
#import sarxml
|
||||
import stdproc
|
||||
import datetime
|
||||
|
||||
|
||||
|
||||
stdWriter = create_writer("log", "", True, filename="prepareStack.log")
|
||||
|
||||
def pulseTiming(frame):
    """Build a per-line orbit by interpolating one state vector per pulse.

    Adapted from runPulseTiming() in InsarProc. Returns an Orbit with one
    hermite-interpolated StateVector per image line, spaced by the pulse
    repetition interval starting at the frame's sensing start.

    BUGFIX: removed the dead `timeVec` list comprehension -- it used the
    Python-2-only `xrange`, raising NameError on Python 3, and its result
    was never read.
    """
    numberOfLines = frame.getNumberOfLines()
    prf = frame.getInstrument().getPulseRepetitionFrequency()
    pri = 1.0 / prf
    startTime = frame.getSensingStart()
    orbit = frame.getOrbit()

    pulseOrbit = Orbit()
    for i in range(numberOfLines):
        dt = i * pri
        time = startTime + datetime.timedelta(seconds=dt)
        sv = orbit.interpolateOrbit(time, method='hermite')
        pulseOrbit.addStateVector(sv)

    return pulseOrbit
|
||||
|
||||
def getPeg(planet, orbit):
    """Estimate the peg point and average height for `orbit` over `planet`.

    Adapted from runSetMocompPath.py. Returns (peg, average_height).
    """
    peg_estimator = stdproc.createGetpeg()
    peg_estimator.wireInputPort(name='planet', object=planet)
    peg_estimator.wireInputPort(name='Orbit', object=orbit)

    # Route the component's log/err/out channels through the shared writer.
    for channel in ("log", "err", "out"):
        stdWriter.setFileTag("getpeg", channel)
    peg_estimator.setStdWriter(stdWriter)

    peg_estimator.estimatePeg()
    return peg_estimator.getPeg(), peg_estimator.getAverageHeight()
|
||||
|
||||
class orbit_info:
    """Per-scene orbit/geometry summary used for baseline estimation."""

    def __init__(self, sar, fname):
        '''Initialize with a sarProc object and corresponding XML file name'''
        pulse_orbit = pulseTiming(sar.make_raw.frame)
        tim, pos, vel, offset = pulse_orbit._unpackOrbit()

        self.filename = fname
        self.tim = tim
        self.pos = pos
        self.vel = vel
        self.nvec = len(self.tim)

        self.dt = sar.make_raw.frame.sensingMid
        self.prf = sar.make_raw.doppler.prf
        self.fd = sar.make_raw.dopplerValues() * self.prf
        self.rng = sar.make_raw.frame.startingRange

        # Peg point, average height, and local radius of curvature.
        self.peg, self.hgt = getPeg(sar.make_raw.planet, pulse_orbit)
        self.rds = self.peg.getRadiusOfCurvature()

        self.clook = None
        self.slook = None
        self.computeLookAngle()

    def computeLookAngle(self):
        """Cache cosine/sine of the look angle (law of cosines in the
        radius / radius+height / slant-range triangle)."""
        self.clook = (2*self.hgt*self.rds+self.hgt**2+self.rng**2)/(2*self.rng*(self.rds+self.hgt))
        self.slook = np.sqrt(1-self.clook**2)
        # print('Estimated Look Angle: %3.2f degrees'%(np.arccos(self.clook)*180.0/np.pi))

    def getBaseline(self, secondary):
        '''Compute baseline between current object and another orbit object.'''
        mid = int(self.nvec/2)
        ref_pos = np.array(self.pos[mid])
        ref_vel = np.array(self.vel[mid])

        # Build the (radial, cross-track, along-track) triad -- from the
        # ROI-PAC scripts.
        rvec = ref_pos/np.linalg.norm(ref_pos)
        crp = np.cross(rvec, ref_vel)/np.linalg.norm(ref_vel)
        crp = crp/np.linalg.norm(crp)
        vvec = np.cross(crp, rvec)
        speed = np.linalg.norm(ref_vel)

        # First guess: the secondary's mid state vector...
        guess = int(secondary.nvec/2)
        sec_pos = np.array(secondary.pos[guess])
        dx = sec_pos - ref_pos
        z_offset = secondary.prf*np.dot(dx, vvec)/speed

        # ...refined by the estimated along-track line offset.
        guess = int(guess - z_offset)
        sec_pos = secondary.pos[guess]

        dx = sec_pos - ref_pos
        hb = np.dot(dx, crp)   # horizontal (cross-track) baseline
        vb = np.dot(dx, rvec)  # vertical (radial) baseline

        # Project onto the cross-look direction.
        # print('Estimated Baseline: %4.2f'%csb)
        return -1.0*hb*self.clook + vb*self.slook
|
||||
|
||||
|
||||
def parse():
    """Parse command-line arguments for the baseline-plot computation.

    Returns the argparse namespace with fnames, Bcrit, Tau, dop, cThresh,
    dirname and base.
    """

    def Range(nmin, nmax):
        # Action factory: validates that a float value lies in [nmin, nmax].
        class RangeObj(argparse.Action):
            def __call__(self, parser, args, values, option_string=None):
                if not nmin <= values <= nmax:
                    msg = 'Argument "{f}" requires value between {nmin} and {nmax}'.format(f=self.dest, nmin=nmin, nmax=nmax)
                    raise argparse.ArgumentTypeError(msg)
                setattr(args, self.dest, values)
        return RangeObj

    #####Actual parser set up
    parser = argparse.ArgumentParser(description='Computes the baseline plot for given set of SAR images.')
    parser.add_argument('fnames', nargs='+', default=None, help='XML files corresponding to the SAR scenes.')
    parser.add_argument('-Bcrit', dest='Bcrit', default=1200.0, type=float,
                        action=Range(0., 10000.),
                        help='Critical Geometric Baseline in meters [0., 10000.]')
    parser.add_argument('-Tau', dest='Tau', default=1080.0, type=float,
                        action=Range(0., 3650.),
                        help='Temporal Decorrelation Time Constant in days [0., 3650.]')
    parser.add_argument('-dop', dest='dop', default=0.5, type=float,
                        action=Range(0., 1.),
                        help='Critical Doppler difference in fraction of PRF')
    parser.add_argument('-coh', dest='cThresh', default=0.3, type=float,
                        action=Range(0., 1.),
                        help='Coherence Threshold to estimate viable interferograms. [0., 1.0]')
    parser.add_argument('-dir', dest='dirname', default='insar_XML', type=str, action='store',
                        help='Directory in which the individual insar XML files are created.')
    parser.add_argument('-base', dest='base', default='base.xml', type=str,
                        help='Base XML for the insar.xml files.')

    return parser.parse_args()
|
||||
|
||||
if __name__ == '__main__':

    # Read every scene named on the command line and collect its orbit info.
    inps = parse()
    nSar = len(inps.fnames)
    print(inps.fnames)
    print('Number of SAR Scenes = %d'%nSar)

    Orbits = []
    print('Reading in all the raw files and metadata.')
    # Fix: xrange/itertools.izip are Python 2 only; the file otherwise uses
    # Python 3 print() calls, so use range/zip throughout.
    for k in range(nSar):
        sar = makeRawApp()
        sar.run(inps.fnames[k])
        Orbits.append(orbit_info(sar, inps.fnames[k]))

    ########## We now have all the pegpoints to start processing.
    Dopplers = np.zeros(nSar)
    Bperp = np.zeros(nSar)
    Days = np.zeros(nSar)

    ####### Setting the first scene as temporary reference.
    reference = Orbits[0]

    Dopplers[0] = reference.fd
    Days[0] = reference.dt.toordinal()
    for k in range(1, nSar):
        secondary = Orbits[k]
        Bperp[k] = reference.getBaseline(secondary)
        Dopplers[k] = secondary.fd
        Days[k] = secondary.dt.toordinal()

    print("************************************")
    print("Index Date Bperp Doppler")
    print("************************************")

    for k in range(nSar):
        print('{0:>3} {1:>10} {2:4.2f} {3:4.2f}'.format(k+1, Orbits[k].dt.strftime('%Y-%m-%d'), Bperp[k],Dopplers[k]))

    print("************************************")

    # Pairwise coherence model: geometric * temporal * doppler terms.
    geomRho = (1-np.clip(np.abs(Bperp[:,None]-Bperp[None,:])/inps.Bcrit, 0., 1.))
    tempRho = np.exp(-1.0*np.abs(Days[:,None]-Days[None,:])/inps.Tau)
    dopRho = (np.abs(Dopplers[:,None] - Dopplers[None,:])/ reference.prf) < inps.dop

    Rho = geomRho * tempRho * dopRho
    for kk in range(nSar):
        Rho[kk,kk] = 0.

    # Mean over the off-diagonal entries (self-pair is zeroed above).
    avgRho = np.mean(Rho, axis=1)*nSar/(nSar-1)
    numViable = np.sum((Rho> inps.cThresh), axis=1)

    #### Currently sorting on average coherence.
    referenceChoice = np.argsort(avgRho)
    referenceOrbit = Orbits[referenceChoice[0]]
    referenceBperp = Bperp[referenceChoice[0]]

    print('*************************************')
    print('Ranking for Reference Scene Selection: ')
    print('**************************************')
    print('Rank Index Date nViable Avg. Coh.' )
    for kk in range(nSar):
        ind = referenceChoice[kk]
        print('{0:>3} {1:>3} {2:>10} {3:>4} {4:>2.3f}'.format(kk+1, ind+1, Orbits[ind].dt.strftime('%Y-%m-%d'), numViable[ind], avgRho[ind]))

    print('***************************************')

    print('***************************************')
    print('List of Viable interferograms:')
    print('***************************************')

    [ii,jj] = np.where(Rho > inps.cThresh)

    print('Reference Secondary Bperp Deltat')
    for mind, sind in zip(ii, jj):
        reference = Orbits[mind]
        secondary = Orbits[sind]
        if reference.dt > secondary.dt:
            print('{0:>10} {1:>10} {2:>4.2f} {3:>4.2f}'.format(reference.dt.strftime('%Y-%m-%d'), secondary.dt.strftime('%Y-%m-%d'), Bperp[mind]-Bperp[sind], Days[mind] - Days[sind]))
            # Target name for the per-pair insar XML; the sartoinsarXML call
            # that consumed it is currently disabled upstream.
            xmlname = '%s/insar_%s_%s.xml'%(inps.dirname, reference.dt.strftime('%Y%m%d'), secondary.dt.strftime('%Y%m%d'))

    print('***************************************')

    ####### Currently picks reference peg point.
    print('***************************************')
    commonPeg = referenceOrbit.peg
    print('Common peg point: ')
    print(commonPeg)
    print('Bperp Range: [%f , %f] '%(Bperp.min()-referenceBperp, Bperp.max()-referenceBperp))

    ###### Choose median doppler
    commonDop = np.median(Dopplers)
    maxDop = np.max(Dopplers)
    minDop = np.min(Dopplers)
    varDop = np.max(np.abs(Dopplers-commonDop))/referenceOrbit.prf

    print('Common Doppler: ', commonDop)
    print('Doppler Range: [%f, %f]'%(minDop, maxDop))
    print('MAx Doppler Variation = %f %%'%(varDop*100))
    print('******************************************')
|
|
@ -0,0 +1,87 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
import os
import sys

# Clone the top-level SCons environment for the applications package and
# register its install location with the rest of the build.
Import('env')
envapplications = env.Clone()
package = 'applications'
envapplications['PACKAGE'] = package
envapplications['INSTALL_PATH'] = os.path.join(
    envapplications['PRJ_SCONS_INSTALL'], package)
Export('envapplications')

install = envapplications['INSTALL_PATH']

# Generate and install the package help from __init__.py.
helpList, installHelp = envapplications['HELP_BUILDER'](envapplications, '__init__.py', install)
envapplications.Install(installHelp, helpList)
envapplications.Alias('install', installHelp)

# Application scripts shipped with the package.  Commented-out entries are
# retired tools kept here for reference.
listFiles = [
    'mdx.py',
    # 'PrepareStack.py',
    'insarApp.py',
    'stripmapApp.py',
    'topsApp.py',
    # 'topsOffsetApp.py',
    # 'xmlGenerator.py',
    # 'dpmApp.py',
    # 'CalculatePegPoint.py',
    # 'calculateBaseline.py',
    # 'extractHDROrbit.py',
    # 'formSLC.py',
    # 'viewMetadata.py',
    'rtcApp.py',
    'make_raw.py',
    '__init__.py',
    'isceApp.py',
    'stitcher.py',
    'dem.py',
    'demdb.py',
    'wbdStitcher.py',
    'upsampleDem.py',
    'iscehelp.py',
    'imageMath.py',
    'waterMask.py',
    'looks.py',
    'isce2gis.py',
    'fixImageXml.py',
    'isce2geotiff.py',
    'dataTileManager.py',
    'wbd.py',
    'downsampleDEM.py',
    'gdal2isce_xml.py',
    'alos2App.py',
    'alos2burstApp.py',
    # 'isce2he5.py',
]

envapplications.Install(install, listFiles)
envapplications.Alias('install', install)
|
|
@ -0,0 +1,46 @@
|
|||
## The applications:
|
||||
# Public names of this package; note this is a historical list and several
# entries refer to retired modules.
__all__ = [
    'CalculatePegPoint',
    'calculateBaseline',
    'createGeneric',
    'dpmApp',
    'extractHDROrbit',
    'focus',
    'formSLC',
    'insarApp',
    'isce.log',
    'make_input',
    'make_raw',
    'mdx',
    'readdb',
    'viewMetadata',
    'xmlGenerator',
]
||||
def createInsar():
    """Factory: build and return a new Insar application instance."""
    from .insarApp import Insar as _Insar
    return _Insar()
||||
def createStitcher():
    """Factory: build and return a new DEM Stitcher instance."""
    from .stitcher import Stitcher as _Stitcher
    return _Stitcher()
||||
def createWbdStitcher():
    """Factory: build and return a new water-body-data Stitcher instance."""
    from .wbdStitcher import Stitcher as _Stitcher
    return _Stitcher()
||||
def createDataTileManager():
    """Factory: build and return a new DataTileManager instance."""
    from .dataTileManager import DataTileManager as _Manager
    return _Manager()
||||
def getFactoriesInfo():
    """Map each application name to the module-level factory that builds it."""
    factories = {}
    factories['Insar'] = {'factory': 'createInsar'}
    factories['DemsStitcher'] = {'factory': 'createStitcher'}
    factories['WbdsStitcher'] = {'factory': 'createWbdStitcher'}
    factories['DataTileManager'] = {'factory': 'createDataTileManager'}
    return factories
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,81 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Walter Szeliga
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
from isce import logging
|
||||
from iscesys.Compatibility import Compatibility
|
||||
Compatibility.checkPythonVersion()
|
||||
from iscesys.Component.FactoryInit import FactoryInit
|
||||
from mroipac.baseline.Baseline import Baseline
|
||||
|
||||
class calculateBaselineApp(FactoryInit):
    """Drive the mroipac Baseline computation for a reference/secondary pair
    of scenes described by an XML parameter file."""

    def __init__(self, arglist):
        # Let FactoryInit build the components named in the XML arguments.
        FactoryInit.__init__(self)
        self.initFactory(arglist)
        self.referenceObj = self.getComponent('Reference')
        self.secondaryObj = self.getComponent('Secondary')
        self.logger = logging.getLogger('isce.calculateBaseline')

    def main(self):
        """Populate both frames, wire the Baseline component and run it."""
        referenceFrame = self.populateFrame(self.referenceObj)
        secondaryFrame = self.populateFrame(self.secondaryObj)

        # Calculate the baseline information
        baseline = Baseline()
        baseline.wireInputPort(name='referenceFrame', object=referenceFrame)
        baseline.wireInputPort(name='secondaryFrame', object=secondaryFrame)
        baseline.wireInputPort(name='referenceOrbit', object=referenceFrame.getOrbit())
        baseline.wireInputPort(name='secondaryOrbit', object=secondaryFrame.getOrbit())
        baseline.wireInputPort(name='ellipsoid',
                               object=referenceFrame.getInstrument().getPlatform().getPlanet().get_elp())
        baseline.baseline()
        print(baseline)

    def populateFrame(self, sensorObj):
        """Parse the sensor metadata and return its frame with the platform
        height rate and velocity filled in."""
        # Parse the image metadata and extract the image
        self.logger.info('Parsing image metadata')
        sensorObj.parse()
        frame = sensorObj.getFrame()

        # Calculate the height, height_dt, and velocity
        self.logger.info("Calculating Spacecraft Velocity")
        frame.calculateHeightDt()
        frame.calculateVelocity()

        return frame
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
if (len(sys.argv) < 2):
|
||||
print("Usage:%s <xml-parameter file>" % sys.argv[0])
|
||||
sys.exit(1)
|
||||
runObj = calculateBaselineApp(sys.argv[1:])
|
||||
runObj.main()
|
||||
|
|
@ -0,0 +1,90 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Walter Szeliga
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
from isce import logging
|
||||
import isceobj
|
||||
from iscesys.Component.FactoryInit import FactoryInit
|
||||
|
||||
class ToGeneric(object):
    """Convert from a satellite-specific format to a generic HDF5-based format."""

    def __init__(self, rawObj=None):
        # rawObj: sensor object whose raw data will be converted.
        self.rawObj = rawObj
        self.logger = logging.getLogger('isce.toGeneric')

    def convert(self, output='test.h5', compression='gzip'):
        """Extract the raw frame from self.rawObj and write it to *output*.

        Parameters
        ----------
        output : str
            Output HDF5 filename.  The default preserves the previously
            hard-coded 'test.h5' behavior.
        compression : str
            HDF5 compression scheme used by the Generic writer.
        """
        from isceobj.Sensor.Generic import Generic
        doppler = isceobj.Doppler.useDOPIQ()
        hhRaw = self.make_raw(self.rawObj, doppler)
        hhRaw.getFrame().getImage().createImage()

        writer = Generic()
        writer.frame = hhRaw.getFrame()
        writer.write(output, compression=compression)

    def make_raw(self, sensor, doppler):
        """
        Extract the unfocused SAR image and associated data

        @param sensor (\a isceobj.Sensor) the sensor object
        @param doppler (\a isceobj.Doppler) the doppler object
        @return (\a make_raw) a make_raw instance
        """
        # (Removed unused local imports of stdproc/isceobj present in the
        # original; isceobj is already imported at module level.)
        from make_raw import make_raw

        # Extract raw image
        self.logger.info("Creating Raw Image")
        mr = make_raw()
        mr.wireInputPort(name='sensor', object=sensor)
        mr.wireInputPort(name='doppler', object=doppler)
        mr.make_raw()

        return mr
|
||||
def main():
    """Initialize components from the XML file named on the command line and
    run the generic-format conversion on the 'Reference' component."""
    import sys
    import isceobj

    initializer = FactoryInit()
    initializer.fileInit = sys.argv[1]
    initializer.defaultInitModule = 'InitFromXmlFile'
    initializer.initComponentFromFile()

    reference = initializer.getComponent('Reference')
    ToGeneric(rawObj=reference).convert()

if __name__ == "__main__":
    main()
|
|
@ -0,0 +1,166 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
import isce
|
||||
import logging
|
||||
import logging.config
|
||||
from iscesys.Component.Application import Application
|
||||
from iscesys.Component.Component import Component
|
||||
import os
|
||||
# Parameter and facility declarations consumed by the DataTileManager
# application below.

DATA_SOURCE = Component.Parameter('_dataSource',
    public_name='dataSource',
    default = '',
    type = str,
    mandatory = True,
    # Fix: user-facing doc typo "3o m" -> "30 m".
    doc = "Data source such as dem1 (30 m resolution), dem3 (90 m resolution) \n" +\
          "or wbd for water body mask")

ACTION = Component.Parameter('_action',
    public_name='action',
    default = 'stitch',
    type = str,
    mandatory = False,
    doc = "Action to be performed: stitch, download or stitchcorrect"
)

BBOX = Component.Parameter('_bbox',
    public_name='bbox',
    default = [],
    container=list,
    type = float,
    mandatory = False,
    doc = "Defines the spatial region in the format south north west east.\n" + \
          "The values should be from (-90,90) for latitudes and (-180,180) for longitudes.")

PAIRS = Component.Parameter('_pairs',
    public_name='pairs',
    default = [],
    container=list,
    type = float,
    mandatory = False,
    doc = "Set of latitude and longitude pairs for which action = 'download' is performed.\n" +\
          "The format is [lat0,lon0,lat1,lon1,...,latn,lonn ].\n" +\
          "The values should be from (-90,90) for latitudes and (-180,180) for longitudes")

MANAGER = Application.Facility(
    '_manager',
    public_name='manager',
    module='iscesys.DataManager',
    factory='createManager',
    mandatory=False,
    args=(DATA_SOURCE,),
    doc="Factory to instantiate the tile manager based on the DATA_SOURCE value"
)
||||
class DataTileManager(Application):
    """Application wrapper around a DataManager tile factory: stitches,
    stitch-corrects, or downloads DEM / water-body tiles."""

    family = 'datatilemanager'

    parameter_list = (
        DATA_SOURCE,
        ACTION,
        PAIRS,
        BBOX
    )
    facility_list = (MANAGER,)

    def __init__(self, family = '', name = ''):
        super(DataTileManager, self).__init__(family if family else self.__class__.family, name=name)
        self._test = None

    def main(self):
        # Dispatch on the configured action.
        if self._action == 'stitch' or self._action == 'stitchcorrect':
            if self._bbox:
                lat = self._bbox[0:2]
                lon = self._bbox[2:4]
                if not self.manager.stitch(lat, lon):
                    print('Could not create a stitched file. Some tiles are missing')
                if self.action == 'stitchcorrect':
                    self.manager.correct()
            else:
                print('Error. The bbox parameter must be specified when action is stitch')
                raise ValueError

        elif self.action == 'download':
            if self._bbox:
                lat = self._bbox[0:2]
                lon = self._bbox[2:4]
                fromBounds = True
            elif self._pairs:
                lat = self._pairs[::2]
                lon = self._pairs[1::2]
                fromBounds = False
            if not (self._bbox or self._pairs):
                print('Error. Either the bbox or the pairs parameters must be specified when action is download')
                raise ValueError
            self.manager.download(lat, lon, fromBounds)

        else:
            print('Unrecognized action', self._action)
            return

    def Usage(self):
        print("\nUsage: dataTileManager.py input.xml\n")
        print("NOTE: if you don't want to store your password in a file you can run it as\n" +\
              "'dataTileManager.py input.xml dataTileManager.manager.username=yourUsername\n" +\
              "dataTileManager.manager.password=yourPassword'\n\n" )

    # Thin property wrappers kept for the Application configuration machinery.
    @property
    def manager(self):
        return self._manager

    @manager.setter
    def manager(self, val):
        self._manager = val

    @property
    def action(self):
        return self._action

    @action.setter
    def action(self, val):
        self._action = val

    @property
    def dataSource(self):
        return self._dataSource

    @dataSource.setter
    def dataSource(self, val):
        self._dataSource = val

    @property
    def pairs(self):
        return self._pairs

    @pairs.setter
    def pairs(self, val):
        self._pairs = val

    @property
    def bbox(self):
        return self._bbox

    @bbox.setter
    def bbox(self, val):
        self._bbox = val
|
||||
if __name__ == "__main__":
|
||||
import sys
|
||||
dt = DataTileManager()
|
||||
dt.configure()
|
||||
dt.run()
|
||||
|
||||
|
|
@ -0,0 +1,171 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
import isce
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
from contrib.demUtils import createDemStitcher
|
||||
|
||||
|
||||
def main():
|
||||
#if not argument provided force the --help flag
|
||||
if(len(sys.argv) == 1):
|
||||
sys.argv.append('-h')
|
||||
|
||||
# Use the epilog to add usage examples
|
||||
epilog = 'Usage examples:\n\n'
|
||||
epilog += 'Stitch (-a stitch) 1 arcsec dems (-s 1) in the bounding region 31 33 -114 -112 using the url (-u) and the log in credentials provided (-n,-w).\n'
|
||||
epilog += 'Create a rsc metadata file (-m) and report the download results (-r)\n'
|
||||
epilog += 'dem.py -a stitch -b 31 33 -114 -112 -s 1 -m rsc -r -n your_username -w your_password -u https://aria-alt-dav.jpl.nasa.gov/repository/products/SRTM1_v3/ \n\n'
|
||||
epilog += 'Download (-a download) the 3 arcsec (-s 3) whose lat/lon are 31 -114 and 31 -115 (-p)\n'
|
||||
epilog += 'dem.py -a download -p 31 -114 31 -115 -s 3 \n\n'
|
||||
epilog += 'Stitch the requested files and apply EGM96 -> WGS84 correction (-c)\n'
|
||||
epilog += 'dem.py -a stitch -b 31 33 -114 -113 -r -s 1 -c\n\n'
|
||||
epilog += 'Download from bounding boxes (-b)\n'
|
||||
epilog += 'dem.py -a download -b 31 33 -114 -113 -r -s 1\n\n'
|
||||
epilog += 'Stitch the files in the local directory (-l) in the bounding region provided keeping the\n'
|
||||
epilog += 'zip files after stitching (-k)\n'
|
||||
epilog += 'dem.py -a stitch -b 31 33 -114 -113 -k -r -l -s 1\n\n'
|
||||
|
||||
#set the formatter_class=argparse.RawDescriptionHelpFormatter otherwise it splits the epilog lines with its own default format
|
||||
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,epilog=epilog)
|
||||
|
||||
parser.add_argument('-a', '--action', type = str, default = 'stitch', dest = 'action', help = 'Possible actions: stitch or download (default: %(default)s). ')
|
||||
parser.add_argument('-c', '--correct', action = 'store_true', dest = 'correct', help = 'Apply correction EGM96 -> WGS84 (default: %(default)s). The output metadata is in xml format only')
|
||||
parser.add_argument('-m', '--meta', type = str, default = 'xml', dest = 'meta', help = 'What type of metadata file is created. Possible values: \
|
||||
xml or rsc (default: %(default)s)')
|
||||
parser.add_argument('-s', '--source', type = int, default = 1, dest = 'source', help = 'Dem SRTM source. Possible values 1 or 3 (default: %(default)s)')
|
||||
parser.add_argument('-f', '--filling', action = 'store_true', dest = 'filling', help = 'Flag to instruct to fill missing Dems with null values \
|
||||
(default null value -32768. Use -v or --filling_value option to change it)')
|
||||
parser.add_argument('-v', '--filling_value', type = int, default = -32768, dest = 'fillingValue', help = 'Value used to fill missing Dems (default: %(default)s)')
|
||||
parser.add_argument('-b', '--bbox', type = int, default = None, nargs = '+', dest = 'bbox', help = 'Defines the spatial region in the format south north west east.\
|
||||
The values should be integers from (-90,90) for latitudes and (0,360) or (-180,180) for longitudes.')
|
||||
parser.add_argument('-p', '--pairs', type = int, default = None, nargs = '+', dest = 'pairs', help = 'Set of latitude and longitude pairs for which --action = download is performed.\
|
||||
The values should be integers from (-90,90) for latitudes and (0,360) or (-180,180) for longitudes')
|
||||
parser.add_argument('-k', '--keep', action = 'store_true', dest = 'keep', help = 'If the option is present then the single files used for stitching are kept. If -l or --local is specified than the flag is automatically set (default: %(default)s)')
|
||||
parser.add_argument('-r', '--report', action = 'store_true', dest = 'report', help = 'If the option is present then failed and succeeded downloads are printed (default: %(default)s)')
|
||||
parser.add_argument('-l', '--local', action = 'store_true', dest = 'local', help = 'If the option is present then use the files that are in the location \
|
||||
specified by --dir. If not present --dir indicates the directory where the files are downloaded (default: %(default)s)')
|
||||
parser.add_argument('-d', '--dir', type = str, dest = 'dir', default = './', help = 'If used in conjunction with --local it specifies the location where the DEMs are located \
|
||||
otherwise it specifies the directory where the DEMs are downloaded and the stitched DEM is generated (default: %(default)s)')
|
||||
|
||||
parser.add_argument('-o', '--output', type = str, dest = 'output', default = None, help = 'Name of the output file to be created in --dir. If not provided the system generates one based on the bbox extremes')
|
||||
parser.add_argument('-n', '--uname', type = str, dest = 'uname', default = None, help = 'User name if using a server that requires authentication')
|
||||
parser.add_argument('-w', '--password', type = str, dest = 'password', default = None, help = 'Password if using a server that requires authentication')
|
||||
parser.add_argument('-t', '--type', type = str, dest = 'type', default = 'version3', help = \
|
||||
'Use version 3 or version 2 SRTM, or nasadem')
|
||||
parser.add_argument('-x', '--noextras', action = 'store_true', dest = 'noextras', help = 'Use this flag if the filenames do not have extra part')
|
||||
parser.add_argument('-u', '--url', type = str, dest = 'url', default = None, help = \
|
||||
'If --type=version2 then this is part of the url where the DEM files are located. The actual location must be' + \
|
||||
'the one specified by --url plus /srtm/version2_1/SRTM(1,3).' \
|
||||
+'If --type=version3 then it represents the full path url')
|
||||
args = parser.parse_args()
|
||||
#first get the url,uname and password since are needed in the constructor
|
||||
|
||||
ds = createDemStitcher(args.type)
|
||||
ds.configure()
|
||||
|
||||
#NASADEM only available in 1-arc sec resolution
|
||||
if(args.type == 'nasadem'):
|
||||
args.source == 1
|
||||
|
||||
if(args.url):
|
||||
if(args.type == 'version3'):
|
||||
if(args.source == 1):
|
||||
ds._url1 = args.url
|
||||
elif(args.source == 3):
|
||||
ds._url3 = args.url
|
||||
else:
|
||||
print('Unrecognized source')
|
||||
raise ValueError
|
||||
|
||||
else:
|
||||
ds.setUrl(args.url)
|
||||
ds.setUsername(args.uname)
|
||||
ds.setPassword(args.password)
|
||||
ds._keepAfterFailed = True
|
||||
#avoid to accidentally remove local file if -k is forgotten
|
||||
#if one wants can remove them manually
|
||||
if(args.local):
|
||||
args.keep = True
|
||||
if(args.meta == 'xml'):
|
||||
ds.setCreateXmlMetadata(True)
|
||||
elif(args.meta == 'rsc'):
|
||||
ds.setCreateRscMetadata(True)
|
||||
if(args.noextras):
|
||||
ds._hasExtras = False
|
||||
ds.setUseLocalDirectory(args.local)
|
||||
ds.setFillingValue(args.fillingValue)
|
||||
ds.setFilling() if args.filling else ds.setNoFilling()
|
||||
if(args.action == 'stitch'):
|
||||
if(args.bbox):
|
||||
lat = args.bbox[0:2]
|
||||
lon = args.bbox[2:4]
|
||||
if (args.output is None):
|
||||
args.output = ds.defaultName(args.bbox)
|
||||
|
||||
if not(ds.stitchDems(lat,lon,args.source,args.output,args.dir,keep=args.keep)):
|
||||
print('Could not create a stitched DEM. Some tiles are missing')
|
||||
else:
|
||||
if(args.correct):
|
||||
#ds.correct(args.output,args.source,width,min(lat[0],lat[1]),min(lon[0],lon[1]))
|
||||
demImg = ds.correct()
|
||||
# replace filename with full path including dir in which file is located
|
||||
demImg.filename = os.path.abspath(os.path.join(args.dir, demImg.filename))
|
||||
demImg.setAccessMode('READ')
|
||||
demImg.renderHdr()
|
||||
else:
|
||||
print('Error. The --bbox (or -b) option must be specified when --action stitch is used')
|
||||
raise ValueError
|
||||
elif(args.action == 'download'):
|
||||
if(args.bbox):
|
||||
lat = args.bbox[0:2]
|
||||
lon = args.bbox[2:4]
|
||||
ds.getDemsInBox(lat,lon,args.source,args.dir)
|
||||
#can make the bbox and pairs mutually esclusive if replace the if below with elif
|
||||
if(args.pairs):
|
||||
ds.downloadFilesFromList(args.pairs[::2],args.pairs[1::2],args.source,args.dir)
|
||||
if(not (args.bbox or args.pairs)):
|
||||
print('Error. Either the --bbox (-b) or the --pairs (-p) options must be specified when --action download is used')
|
||||
raise ValueError
|
||||
|
||||
else:
|
||||
print('Unrecognized action -a or --action',args.action)
|
||||
return
|
||||
|
||||
if(args.report):
|
||||
for k,v in list(ds._downloadReport.items()):
|
||||
print(k,'=',v)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
|
|
@ -0,0 +1,81 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2018 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Eric Gurrola
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
import sys
|
||||
import isce
|
||||
from isceobj.InsarProc.createDem import createDem
|
||||
from iscesys.DataManager import createManager
|
||||
|
||||
class INSAR:
    """Minimal insar stand-in exposing the flags that createDem() inspects."""

    def __init__(self, snwe):
        # Whether processing may continue when a DEM is unavailable or
        # cannot be downloaded.  For this standalone DEM application the
        # answer is always no, so the flag is hard-wired to False
        # (the snwe argument is accepted but not stored).
        self.proceedIfZeroDem = False
||||
|
||||
class SELF:
    """Minimal application stand-in handed to createDem().

    Mimics just the attributes createDem() reads from a full insar
    application object.
    """

    def __init__(self, snwe, hiresonly=False):
        # Geocoding bounding box: (south, north, west, east).
        self.geocode_bbox = snwe
        self.insar = INSAR(snwe)
        # 1-arcsecond DEM stitcher from the ISCE data-manager factory.
        self.demStitcher = createManager('dem1', 'iscestitcher')
        # True  -> only query the high-resolution DEM server.
        # False -> use the high-res DEM if available, otherwise fall
        #          back to the low-resolution server.
        self.useHighResolutionDemOnly = hiresonly
||||
|
||||
class INFO:
    """Carries the geographic extremes that createDem() queries."""

    def __init__(self, snwe):
        # (south, north, west, east) bounds in degrees.
        self.extremes = snwe

    def getExtremes(self, x):
        """Return the stored extremes; the *x* argument is ignored."""
        return self.extremes
||||
|
||||
if __name__=="__main__":
    # Require the four bounding-box values; print usage and stop otherwise.
    if len(sys.argv) < 5:
        print("Usage: demdb.py s n w e [h]")
        print("where s, n, w, e are latitude, longitude bounds in degrees")
        print("The optional 'h' flag indicates to only download a high res dem,"+
            "if available.\n"
            "If 'h' is not on the command line, then a low res dem will be "+
            "downloaded,\nif the hi res is not available.")

        sys.exit(0)

    # First four positional arguments are the bounding box.
    snwe = list(map(float,sys.argv[1:5]))
    print("snwe = ", snwe)

    # Optional trailing 'h' restricts the download to the high-res server.
    hiresonly = 'h' in sys.argv
    if hiresonly:
        print("set hiresonly to True")

    # Build the minimal objects createDem() expects and run it.
    self = SELF(snwe, hiresonly)
    info = INFO(snwe)
    createDem(self,info)
|
|
@ -0,0 +1,104 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2017 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: David Bekaert
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
from osgeo import gdal
|
||||
from isce.applications.gdal2isce_xml import gdal2isce_xml
|
||||
|
||||
|
||||
# command line parsing of input file
|
||||
def cmdLineParse():
    """Build and run the command-line parser for the DEM down-sampler.

    Returns the parsed argparse.Namespace carrying the attributes
    ``input_dem_vrt``, ``res_meter`` and ``res_seconds``.
    """
    parser = argparse.ArgumentParser(
        description='Generate down-sample DEM from wgs84.vrt DEM file')
    parser.add_argument('-i', '--input',
                        dest='input_dem_vrt', type=str, required=True,
                        help='Input DEM vrt filename (GDAL supported)')
    # The output resolution may be given either in meters or in arc
    # seconds; both default to '' meaning "not provided".
    parser.add_argument('-rmeter', '--res_meter',
                        dest='res_meter', type=str, default='', required=False,
                        help='DEM output resolution in m units')
    parser.add_argument('-rsec', '--res_seconds',
                        dest='res_seconds', type=str, default='', required=False,
                        help='DEM output resolution in arc seconds units')
    return parser.parse_args()
||||
|
||||
|
||||
# main script
|
||||
if __name__ == '__main__':
    '''
    Main driver.
    '''

    # Parse command line
    inps = cmdLineParse()

    # At least one of the two resolution options must be supplied.
    if inps.res_meter == '' and inps.res_seconds == '':
        raise Exception('Provide either rmeter or rsec argument for DEM resolution')

    # check if the input file exist
    if not os.path.isfile(inps.input_dem_vrt):
        raise Exception('Input file is not found ....')
    # check if the provided input file is a .vrt file and also get the envi filename
    input_dem_envi, file_extension = os.path.splitext(inps.input_dem_vrt)
    if file_extension != '.vrt':
        raise Exception('Input file is not a vrt file ....')
    # get the file path
    input_path = os.path.dirname(os.path.abspath(inps.input_dem_vrt))

    # convert the output resolution from m in degrees
    # (this is approximate, could use instead exact expression)
    # NOTE(review): when both -rmeter and -rsec are given, -rmeter silently
    # takes precedence -- confirm that is intended.
    if inps.res_meter != '':
        # ~110 km per degree of latitude is used as the conversion factor.
        gdal_opts = gdal.WarpOptions(format='ENVI',
                                     outputType=gdal.GDT_Int16,
                                     dstSRS='EPSG:4326',
                                     xRes=float(inps.res_meter)/110/1000,
                                     yRes=float(inps.res_meter)/110/1000,
                                     targetAlignedPixels=True)
        # res_degree = float(inps.res_meter)/110/1000
    elif inps.res_seconds != '':
        gdal_opts = gdal.WarpOptions(format='ENVI',
                                     outputType=gdal.GDT_Int16,
                                     dstSRS='EPSG:4326',
                                     xRes=float(inps.res_seconds)*1/60*1/60,
                                     yRes=float(inps.res_seconds)*1/60*1/60,
                                     targetAlignedPixels=True)
        # res_degree = float(1/60*1/60*float(inps.res_seconds))

    # The ENVI filename of the coarse DEM to be generated
    # NOTE(review): input_dem_envi is splitext() of the user-supplied path and
    # may itself contain directory components, so this join can yield an
    # unexpected location when -i is path-qualified -- verify.
    coarse_dem_envi = os.path.join(input_path, "Coarse_" + input_dem_envi)

    # Using gdal to down-sample the WGS84 DEM
    # cmd = "gdalwarp -t_srs EPSG:4326 -ot Int16 -of ENVI -tap -tr " + str(res_degree) + " " + str(res_degree) + " " + inps.input_dem_vrt + " " + coarse_dem_envi
    # os.system(cmd)
    ds = gdal.Warp(coarse_dem_envi,inps.input_dem_vrt,options=gdal_opts)
    # Release the dataset handle so GDAL flushes the output to disk.
    ds = None

    # Generating the ISCE xml and vrt of this coarse DEM
    gdal2isce_xml(coarse_dem_envi)
||||
|
|
@ -0,0 +1,644 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
import time
|
||||
import os
|
||||
|
||||
import isce
|
||||
import isceobj
|
||||
import iscesys
|
||||
from iscesys.Compatibility import Compatibility
|
||||
from isceobj.Pause import pause
|
||||
|
||||
|
||||
from iscesys.ImageUtil.ImageUtil import ImageUtil as IU
|
||||
from iscesys.Component.Application import Application
|
||||
from isce.applications.insarApp import _InsarBase, logger
|
||||
|
||||
|
||||
class Dpm(_InsarBase):
    """Dpm Application class:

    Implements Dpm processing flow for a pair of scenes from
    sensor raw data to geocoded correlation.
    """

    def __init__(self):
        super(Dpm, self).__init__()
        ## This indicates something has gone wrong, I must delete geocode.
        # NOTE(review): this deletes the runGeocode attribute set up by
        # _InsarBase; the class defines its own runGeocode method below.
        # Confirm _InsarBase installs runGeocode as an instance attribute,
        # otherwise this 'del' would raise AttributeError.
        del self.runGeocode

    ## extends _InsarBase_steps, but not in the same was as main
    def _steps(self):
        """Register the Dpm-specific pipeline steps after the base steps."""
        super(Dpm, self)._steps()

        # Geocode
        self.step('geocodecorifg', func=self.geocodeCorIfg)
        self.step('geocodecor4rlks', func=self.geocodeCor4rlks)

        self.step('renderProcDoc', func=self.renderProcDoc)

        # Record the end time so the final step can log the elapsed total.
        self.step('timerend', local='timeEnd',func=time.time)

        # delayed_args is a string evaluated at step-execution time, when
        # timeEnd and timeStart exist in the step machinery's scope.
        self.step('logtime', func=logger.info,
                  delayed_args = (" 'Total Time: %i seconds'%(timeEnd-timeStart)",)
                  )
        return None

    def renderProcDoc(self):
        """Render the accumulated processing documentation as XML."""
        self.insar.procDoc.renderXml()

    def coherence(self):
        """Run coherence estimation, then render the processing doc."""
        self.runCoherence(self.correlation_method)
        self.renderProcDoc()

    def geocodeCorIfg(self):
        """Geocode the correlation file and the flattened interferogram.

        The RMG correlation is first repacked as a pseudo-complex
        (.corint) image so it can go through the complex geocoder, then
        unpacked back to RMG after geocoding, and a ROI_PAC-style .rsc
        is written for the result.
        """
        corFilename = self.insar.coherenceFilename
        corintFilename = corFilename.replace('.cor','.corint')
        widthInt = self.insar.resampIntImage.width
        rmg_to_cmplx(corFilename,corintFilename,widthInt)
        corintGeocodeFilename = corintFilename+'.geo'
        demGeocodeFilename = corintFilename+'.demcrop'
        geo = self.runGeocode(corintFilename,
                              widthInt,
                              corintGeocodeFilename,
                              demGeocodeFilename)
        geoWidth = geo.computeGeoImageWidth()
        print("geocodecor: widthGeo = ", geoWidth)
        # Now geocode the flattened topophase interferogram itself.
        ifgGeocodeFilename = self.insar.geocodeFilename
        demCropFilename = self.insar.demCropFilename
        topoflatIntFilename = self.insar.topophaseFlatFilename
        widthInt = self.insar.resampIntImage.width
        geo = self.runGeocode(topoflatIntFilename,
                              widthInt,
                              ifgGeocodeFilename,
                              demCropFilename)
        geoWidth = geo.computeGeoImageWidth()
        print("geocodeifg: widthGeo = ", geoWidth)

        # Convert the geocoded pseudo-complex correlation back to RMG.
        corGeocodeFilename = corFilename + '.geo'
        cmplx_to_rmg(corintGeocodeFilename, corGeocodeFilename, geoWidth)

        # Emit a ROI_PAC .rsc describing the geocoded correlation.
        self.geo_to_rsc(ifgGeocodeFilename, corGeocodeFilename)

        return None

    def geocodeCor4rlks(self):
        """Create a 4-looks (range and azimuth) correlation file and geocode it."""

        corFilename = self.insar.coherenceFilename
        # NOTE(review): corintFilename is computed but not used in this method.
        corintFilename = corFilename.replace('.cor','.corint')
        widthInt = self.insar.resampIntImage.width

        #Not the right place for this block. Create the 4rlks correlation file and
        # geocode it.
        #~/Util/isce/components/mroipac/looks/Nbymhgt.py
        # topophase.cor topophase_4rlks.cor 9480 4 4
        cor4rlksFilename = corFilename.replace('.cor','_4rlks.cor')
        from mroipac.looks.Nbymhgt import Nbymhgt
        nbymh = Nbymhgt()
        nbymh.inputImage = corFilename
        nbymh.outputImage = cor4rlksFilename
        nbymh.width = widthInt
        nbymh.rangeLook = 4
        nbymh.azimuthLook = 4
        nbymh.nbymhgt()
        # NOTE(review): true division -- under Python 3 this produces a float
        # width; confirm whether integer division (//) was intended.
        width4rlksInt = widthInt/4
        corint4rlksFilename = cor4rlksFilename.replace('.cor','.corint')
        rmg_to_cmplx(cor4rlksFilename, corint4rlksFilename, width4rlksInt)
        corint4rlksGeocodeFilename = corint4rlksFilename+'.geo'
        dem4rlksGeocodeFilename = corint4rlksFilename+'.demcrop'
        geo4rlks = self.runGeocode4rlks(corint4rlksFilename,
                                        width4rlksInt,
                                        corint4rlksGeocodeFilename,
                                        dem4rlksGeocodeFilename)
        geo4rlksWidth = geo4rlks.computeGeoImageWidth()
        print("geocodecor: widthGeo = ", geo4rlksWidth)

        cor4rlksGeocodeFilename = cor4rlksFilename + '.geo'
        cmplx_to_rmg(corint4rlksGeocodeFilename, cor4rlksGeocodeFilename, geo4rlksWidth)

        # self.geo_to_rsc(ifgGeocodeFilename,corGeocodeFilename)

        return None

    def geo_to_rsc(self, ifgGeoFile, corGeoFile):
        """Translate the geocoded-ifg XML metadata into a ROI_PAC .rsc file.

        Reads <ifgGeoFile>.xml and writes <corGeoFile>.rsc containing the
        WIDTH/FILE_LENGTH/X_FIRST/X_STEP/Y_FIRST/Y_STEP keys.
        """
        from isceobj.XmlUtil.XmlUtil import XmlUtil

        # Values harvested from the two coordinate components of the XML.
        xmlDat = {'Coordinate1':{'size':None,'startingValue':None,'delta':None},
                  'Coordinate2':{'size':None,'startingValue':None,'delta':None}}

        # Map each .rsc key to its (component, property) source in the XML.
        rscXML = {'WIDTH':('Coordinate1','size'),
                  'X_FIRST':('Coordinate1','startingValue'),
                  'X_STEP':('Coordinate1','delta'),
                  'FILE_LENGTH':('Coordinate2','size'),
                  'Y_FIRST':('Coordinate2','startingValue'),
                  'Y_STEP':('Coordinate2','delta')}

        # Output order of the keys in the .rsc file.
        rscOrder = ('WIDTH','FILE_LENGTH','X_FIRST','X_STEP','Y_FIRST','Y_STEP')

        ifgGeoXmlFile = ifgGeoFile + '.xml'

        xu = XmlUtil()
        xuf = xu.readFile(ifgGeoXmlFile)
        c = xuf.findall('component')

        # Collect every <property> value under each <component>.
        for cx in c:
            cxn = cx.attrib['name']
            p = cx.findall('property')
            for e in p:
                xmlDat[cxn][e.attrib['name']] = e.findall('value')[0].text

        corGeoRscFile = corGeoFile + '.rsc'

        # Fixed 25-column key field, padded with spaces.
        with open(corGeoRscFile,'w') as RSC:
            spc = " "*25
            for a in rscOrder:
                RSC.write(
                    "%s%s%s\n" % (a,spc[0:25-len(a)],xmlDat[rscXML[a][0]][rscXML[a][1]])
                    )

        return None

    def runGeocode(self, inFilename, widthIn, geoFilename, demcropFilename):
        """Geocode one complex image of width widthIn against the scene DEM.

        Writes geoFilename and demcropFilename; returns the configured
        stdproc Geocode object (callers use computeGeoImageWidth()).
        """
        import stdproc
        from isceobj import createDemImage

        print("runGeocode: inFilename, widthIn = ", inFilename, widthIn)
        print("runGeocode: geoFilename, demcropFilename = ", geoFilename, demcropFilename)

        logger.info("Geocoding Image")

        # Initialize the Dem

        demImage = createDemImage()
        IU.copyAttributes(self.insar.demImage, demImage)
        demImage.setAccessMode('read')
        demImage.createImage()

        # Initialize the flattened interferogram
        from isceobj import createIntImage, createImage
        intImage = createIntImage()
        intImage.filename = inFilename
        intImage.width = widthIn
        intImage.setAccessMode('read')
        intImage.createImage()

        minLat, maxLat, minLon, maxLon = self.insar.topo.snwe

        planet = self.insar.referenceFrame.instrument.getPlatform().getPlanet()

        objGeo = stdproc.createGeocode()
        objGeo.listInputPorts()
        objGeo.wireInputPort(name='peg',object=self.insar.peg)
        objGeo.wireInputPort(name='frame',object=self.insar.referenceFrame)
        objGeo.wireInputPort(name='planet',object=planet)
        objGeo.wireInputPort(name='dem',object=demImage)
        objGeo.wireInputPort(name='interferogram',object=intImage)
        objGeo.wireInputPort(name='geoPosting', object=self.geoPosting)
        print("self.geoPosting = ", self.geoPosting)

        objGeo.snwe = minLat, maxLat, minLon, maxLon
        objGeo.geoFilename = geoFilename
        objGeo.demCropFilename = demcropFilename

        #set the tag used in the outfile. each message is precided by this tag
        #is the writer is not of "file" type the call has no effect
        objGeo.stdWriter = self.stdWriter.set_file_tags("geocode", "log", "err", "out")

        # see mocompbaseline
        objFormSlc1 = self.insar.formSLC1
        mocompPosition1 = objFormSlc1.getMocompPosition()
        posIndx = 1
        objGeo.referenceOrbit = mocompPosition1[posIndx]
        # NOTE(review): prf1, goodLines and patchSize below are computed but
        # never used in this method.
        prf1 = self.insar.referenceFrame.instrument.getPulseRepetitionFrequency()
        dp = self.insar.dopplerCentroid.getDopplerCoefficients(inHz=False)[0]
        v = self.insar.procVelocity
        h = self.insar.averageHeight
        objGeo.setDopplerCentroidConstantTerm(dp)
        objGeo.setBodyFixedVelocity(v)
        objGeo.setSpacecraftHeight(h)
        objGeo.setNumberRangeLooks(self.insar.numberRangeLooks)
        objGeo.setNumberAzimuthLooks(self.insar.numberAzimuthLooks)
        # I have no idea what ismocomp means
        goodLines = self.insar.numberValidPulses
        patchSize = self.insar.patchSize
        # this variable was hardcoded in geocode.f90 and was equal to (8192 - 2048)/2
        is_mocomp = self.insar.is_mocomp
        # is_mocomp = int((patchSize - goodLines)/2)
        objGeo.setISMocomp(is_mocomp)

        objGeo.geocode()

        intImage.finalizeImage()
        demImage.finalizeImage()
        return objGeo

    def runGeocode4rlks(self, inFilename, widthIn, geoFilename, demcropFilename):
        """Geocode a 4-looks image, wiring frame parameters explicitly.

        Unlike runGeocode, the frame port is not wired; range spacing is
        scaled by 4 and looks are forced to 1.0 for the multilooked input.
        Interactive: pauses and prints extensive diagnostics.
        """
        import stdproc
        from isceobj import createIntImage, createImage

        print("runGeocode4rlks: inFilename, widthIn = ", inFilename, widthIn)
        print("runGeocode4rlks: geoFilename, demcropFilename = ",
              geoFilename,
              demcropFilename)
        pause(message="Paused in runGeocode4rlks")

        logger.info("Geocoding Image")

        # Initialize the Dem
        from isceobj import createDemImage
        demImage = createDemImage()
        IU.copyAttributes(self.insar.demImage,demImage)
        demImage.setAccessMode('read')
        demImage.createImage()
        print("demImage.firstLatitude = ", demImage.firstLatitude)
        print("demImage.firstLongitude = ", demImage.firstLongitude)
        print("demImage.deltaLatitude = ", demImage.deltaLatitude)
        print("demImage.deltaLongitude = ", demImage.deltaLongitude)
        print("demImage.width = ", demImage.width)
        print("demImage.length = ", demImage.length)
        # Far corner of the DEM, derived from origin + (count-1)*spacing.
        demImage_lastLatitude = (
            demImage.firstLatitude + (demImage.length-1)*demImage.deltaLatitude
            )
        demImage_lastLongitude = (
            demImage.firstLongitude + (demImage.width-1)*demImage.deltaLongitude
            )

        print("demImage_lastLatitude = ", demImage_lastLatitude)
        print("demImage_lastLongitude = ", demImage_lastLongitude)

        # Initialize the input image
        intImage = createIntImage()
        intImage.setFilename(inFilename)
        intImage.setWidth(widthIn)
        intImage.setAccessMode('read')
        intImage.createImage()

        minLat, maxLat, minLon, maxLon = self.insar.topo.snwe
        print("objTopo.minLat = ", minLat)
        print("objTopo.minLon = ", minLon)
        print("objTopo.maxLat = ", maxLat)
        print("objTopo.maxLon = ", maxLon)
        pause(message="Paused in runGeocode4rlks")

        planet = self.insar.referenceFrame.instrument.getPlatform().getPlanet()

        objGeo = stdproc.createGeocode()
        objGeo.listInputPorts()
        objGeo.wireInputPort(name='peg',object=self.insar.peg)
        # objGeo.wireInputPort(name='frame',object=self.insar.referenceFrame)
        # Frame parameters set by hand instead of via the frame port; range
        # pixel size is multiplied by 4 to match the 4-looks input.
        objGeo.rangeFirstSample = self.insar.referenceFrame.getStartingRange()
        objGeo.slantRangePixelSpacing = self.insar.referenceFrame.instrument.getRangePixelSize()*4
        objGeo.prf = self.insar.referenceFrame.instrument.getPulseRepetitionFrequency()
        objGeo.radarWavelength = self.insar.referenceFrame.instrument.getRadarWavelength()
        objGeo.wireInputPort(name='planet',object=planet)
        objGeo.wireInputPort(name='dem',object=demImage)
        objGeo.wireInputPort(name='interferogram',object=intImage)
        print("self.geoPosting = ",self.geoPosting)
        objGeo.wireInputPort(name='geoPosting',object=self.geoPosting)

        objGeo.snwe = minLat, maxLat, minLon, maxLon
        objGeo.setGeocodeFilename(geoFilename)
        objGeo.setDemCropFilename(demcropFilename)

        #set the tag used in the outfile. each message is precided by this tag
        #is the writer is not of "file" type the call has no effect
        objGeo.stdWriter = self.stdWriter.set_file_tags("geocode", "log", "err", "out")

        # see mocompbaseline
        objFormSlc1 = self.insar.formSLC1
        mocompPosition1 = objFormSlc1.getMocompPosition()
        posIndx = 1
        objGeo.setReferenceOrbit(mocompPosition1[posIndx])
        # NOTE(review): prf1, goodLines and patchSize below are computed but
        # never used in this method.
        prf1 = self.insar.referenceFrame.instrument.getPulseRepetitionFrequency()
        dp = self.insar.dopplerCentroid.getDopplerCoefficients(inHz=False)[0]
        v = self.insar.procVelocity
        h = self.insar.averageHeight
        objGeo.setDopplerCentroidConstantTerm(dp)
        objGeo.setBodyFixedVelocity(v)
        objGeo.setSpacecraftHeight(h)
        # Input is already multilooked, so the geocoder takes 1 look.
        objGeo.setNumberRangeLooks(1.0) #self.insar.numberRangeLooks)
        objGeo.setNumberAzimuthLooks(1.0) #self.insar.numberAzimuthLooks)
        # I have no idea what ismocomp means
        goodLines = self.insar.numberValidPulses
        patchSize = self.insar.patchSize
        # this variable was hardcoded in geocode.f90 and was equal to (8192 - 2048)/2
        is_mocomp = self.insar.is_mocomp
        # is_mocomp = int((patchSize - goodLines)/2)
        objGeo.setISMocomp(is_mocomp)

        objGeo.geocode()

        # Dump the full input state passed to the Fortran geocoder.
        print("Input state paraemters to gecode.f90:")
        print("Minimum Latitude = ", objGeo.minimumLatitude)
        print("Maximum Latitude = ", objGeo.maximumLatitude)
        print("Minimum Longitude = ", objGeo.minimumLongitude)
        print("Maximum Longitude = ", objGeo.maximumLongitude)
        print("Ellipsoid Major Semi Axis = ", objGeo.ellipsoidMajorSemiAxis)
        print("Ellipsoid Eccentricity Squared = ", objGeo.ellipsoidEccentricitySquared)
        print("Peg Latitude = ", objGeo.pegLatitude)
        print("Peg Longitude = ", objGeo.pegLongitude)
        print("Peg Heading = ", objGeo.pegHeading)
        print("Range Pixel Spacing = ", objGeo.slantRangePixelSpacing)
        print("Range First Sample = ", objGeo.rangeFirstSample)
        print("Spacecraft Height = ", objGeo.spacecraftHeight)
        print("Planet Local Radius = ", objGeo.planetLocalRadius)
        print("Body Fixed Velocity = ", objGeo.bodyFixedVelocity)
        print("Doppler Centroid Constant Term = ", objGeo.dopplerCentroidConstantTerm)
        print("PRF = ", objGeo.prf)
        print("Radar Wavelength = ", objGeo.radarWavelength)
        print("S Coordinate First Line = ", objGeo.sCoordinateFirstLine)
        print("Azimuth Spacing = ", objGeo.azimuthSpacing)
        print("First Latitude = ", objGeo.firstLatitude)
        print("First Longitude = ", objGeo.firstLongitude)
        print("Delta Latitude = ", objGeo.deltaLatitude)
        print("Delta Longitude = ", objGeo.deltaLongitude)
        print("Length = ", objGeo.length)
        print("Width = ", objGeo.width)
        print("Number Range Looks = ", objGeo.numberRangeLooks)
        print("Number Azimuth Looks = ", objGeo.numberAzimuthLooks)
        print("Number Points Per DEM Post = ", objGeo.numberPointsPerDemPost)
        print("Is Mocomp = ", objGeo.isMocomp)
        print("DEM Width = ", objGeo.demWidth)
        print("DEM Length = ", objGeo.demLength)
        # print("Reference Orbit = ", objGeo.referenceOrbit)
        print("Dim1 Reference Orbit = ", objGeo.dim1_referenceOrbit)
        intImage.finalizeImage()
        demImage.finalizeImage()
        return objGeo

    def runGeocodeCor(self):
        """Geocode the pseudo-complex correlation image.

        NOTE(review): this method references corintFilename, which is never
        defined in this scope -- calling it raises NameError. It appears to
        be dead/legacy code superseded by geocodeCorIfg(); confirm before use.
        """
        import stdproc

        logger.info("Geocoding Correlation")
        objFormSlc1 = self.insar.formSLC1
        # Initialize the Dem
        from isceobj import createDemImage, createIntImage, createImage
        demImage = createDemImage()
        IU.copyAttributes(self.insar.demImage,demImage)
        demImage.setAccessMode('read')
        demImage.createImage()

        topoflatIntFilename = self.insar.topophaseFlatFilename
        widthInt = self.insar.resampIntImage.width

        intImage = createIntImage()
        widthInt = self.insar.resampIntImage.width
        # NOTE(review): corintFilename is undefined here (see docstring).
        intImage.setFilename(corintFilename)
        intImage.setWidth(widthInt)
        intImage.setAccessMode('read')
        intImage.createImage()

        posIndx = 1
        mocompPosition1 = objFormSlc1.getMocompPosition()

        minLat, maxLat, minLon, maxLon = self.insar.topo.snwe

        planet = self.insar.referenceFrame.instrument.getPlatform().getPlanet()

        objGeo = stdproc.createGeocode()
        objGeo.wireInputPort(name='peg',object=self.insar.peg)
        objGeo.wireInputPort(name='frame',object=self.insar.referenceFrame)
        objGeo.wireInputPort(name='planet',object=planet)
        objGeo.wireInputPort(name='dem',object=demImage)
        objGeo.wireInputPort(name='interferogram',object=intImage)
        objGeo.snwe = minLat, maxLat, minLon, maxLon
        corGeocodeFilename = corintFilename+'.geo'
        demGeocodeFilename = corintFilename+'.demcrop'
        objGeo.setGeocodeFilename(corGeocodeFilename)
        objGeo.setDemCropFilename(demGeocodeFilename)
        #set the tag used in the outfile. each message is precided by this tag
        #is the writer is not of "file" type the call has no effect
        objGeo.stdWriter = self.stdWriter.set_file_tags("geocode", "log", "err", "out")
        # see mocompbaseline
        objGeo.setReferenceOrbit(mocompPosition1[posIndx])
        prf1 = self.insar.referenceFrame.instrument.getPulseRepetitionFrequency()
        dp = self.insar.dopplerCentroid.getDopplerCoefficients(inHz=False)[0]
        v = self.insar.procVelocity
        h = self.insar.averageHeight
        objGeo.setDopplerCentroidConstantTerm(dp)
        objGeo.setBodyFixedVelocity(v)
        objGeo.setSpacecraftHeight(h)
        objGeo.setNumberRangeLooks(self.insar.numberRangeLooks)
        objGeo.setNumberAzimuthLooks(self.insar.numberAzimuthLooks)
        # I have no idea what ismocomp means
        goodLines = self.insar.numberValidPulses
        patchSize = self.insar.patchSize
        # this variable was hardcoded in geocode.f90 and was equal to (8192 - 2048)/2
        is_mocomp = int((patchSize - goodLines)/2)
        objGeo.setISMocomp(is_mocomp)

        objGeo.geocode()

        intImage.finalizeImage()
        demImage.finalizeImage()
        return objGeo

    def restart(self):
        """Hook invoked when resuming the flow; only announces the restart."""
        print("Restarting with Filtering")
        return

    ## main() extends _InsarBase.main()
    def main(self):
        """Run the full Dpm flow: base insar processing through geocoding."""
        import time
        timeStart = time.time()

        super(Dpm, self).main()

        # self.runCorrect()

        self.runShadecpx2rg()

        self.runRgoffset()

        # Cull offoutliers
        self.iterate_runOffoutliers()

        self.runResamp_only()

        self.insar.topoIntImage=self.insar.resampOnlyImage
        self.runTopo()
        self.runCorrect()

        # Coherence ?
        self.runCoherence(method=self.correlation_method)

        #ouput the procDoc and pause in order to process coherence off line
        #this processing should really be done using _steps.
        self.insar.procDoc.renderXml()
        pause(message="Paused in main")

        # Filter ?
        self.runFilter()

        # Unwrap ?
        self.verifyUnwrap()

        # Geocode
        self.geocodeCorIfg()

        timeEnd = time.time()
        logger.info("Total Time: %i seconds" %(timeEnd - timeStart))

        self.insar.procDoc.renderXml()

        return None
||||
|
||||
|
||||
def rmgcor_ifgphs_to_cmplx(rmg, ifg, cpx, width):
    """Combine RMG correlation magnitudes with interferogram phases.

    Deprecated: always raises DeprecationWarning before doing any work.
    The retained body below the raise (kept for reference) would write,
    for each pixel, cor*cos(phase) and cor*sin(phase) to *cpx*.

    :param rmg: path to the RMG (amp/cor band-interleaved) input file
    :param ifg: path to the complex interferogram input file
    :param cpx: path of the complex output file to create
    :param width: samples per line
    :raises DeprecationWarning: always
    """
    import struct
    import math

    # Deliberately disabled; everything below this raise is unreachable.
    # (Fixed the typo "ude" -> "use" in the message.)
    raise DeprecationWarning("Don't use this function")

    length = int(os.stat(ifg).st_size/8./width)

    rmgFile = open(rmg,'rb')
    ifgFile = open(ifg,'rb')
    cpxFile = open(cpx,'wb')

    w = int(width)
    width2 = 2*w
    fmt = "%df" % (width2,)
    aCpxLine = [0.0]*width2

    for iii in range(length):
        anIfgLine = struct.unpack(fmt,ifgFile.read(width2*4))
        aRmgLine = struct.unpack(fmt,rmgFile.read(width2*4))
        for jjj in range(w):
            ifgPhase = math.atan2(anIfgLine[2*jjj+1],anIfgLine[2*jjj])
            # ampVal = aRmgLine[jjj]
            corVal = aRmgLine[w+jjj]
            aCpxLine[2*jjj] = corVal*math.cos(ifgPhase)
            aCpxLine[2*jjj+1] = corVal*math.sin(ifgPhase)
        cpxFile.write(struct.pack(fmt,*aCpxLine))

    rmgFile.close()
    ifgFile.close()
    cpxFile.close()
    return
||||
|
||||
def ifg1amp_ifg2amp_to_rmg(ifg1,ifg2,rmg,width):
    """
    DEPRECATED -- always raises DeprecationWarning.

    Intended behavior (see the unreachable code below, kept for reference):
    take two complex files and write an RMG file whose first band holds the
    amplitudes of ifg1 and whose second band holds the amplitudes of ifg2.

    @param ifg1 first input complex file (float32 real/imag pairs)
    @param ifg2 second input complex file (float32 real/imag pairs)
    @param rmg output RMG file name
    @param width number of samples per line
    @raises DeprecationWarning always
    """
    import struct
    import math

    # Fixed typo in the message ("ude" -> "use").
    raise DeprecationWarning("Don't use this function")

    # ---- unreachable legacy implementation, retained for reference ----
    length = int(os.stat(ifg1).st_size/8./width)

    ifg1File = open(ifg1,'rb')
    ifg2File = open(ifg2,'rb')
    rmgFile = open(rmg,'wb')

    w = int(width)
    width2 = 2*w
    fmt = "%df" % (width2,)
    aRmgLine = [0.0]*width2

    for iii in range(length):
        anIfg1Line = struct.unpack(fmt,ifg1File.read(width2*4))
        anIfg2Line = struct.unpack(fmt,ifg2File.read(width2*4))
        for jjj in range(w):
            amp1 = math.sqrt(anIfg1Line[2*jjj]**2 + anIfg1Line[2*jjj+1]**2)
            amp2 = math.sqrt(anIfg2Line[2*jjj]**2 + anIfg2Line[2*jjj+1]**2)
            aRmgLine[jjj] = amp1
            aRmgLine[w + jjj] = amp2
        rmgFile.write(struct.pack(fmt,*aRmgLine))

    ifg1File.close()
    ifg2File.close()
    rmgFile.close()
    return
|
||||
|
||||
def rmg_to_cmplx(rmg,cpx,width):
    """
    Re-pack an RMG file into a "complex" layout.

    Each input line holds width float32 amplitude values followed by width
    float32 correlation values; each output line interleaves them as
    (amp, cor) pairs.  No arithmetic is performed on the sample values.

    @param rmg input RMG file name
    @param cpx output file name
    @param width number of samples per line
    """
    import struct
    import math

    ncols = int(width)
    samples_per_line = 2*ncols
    line_fmt = "%df" % (samples_per_line,)
    # 8 bytes per pixel per line pair (two float32 bands).
    nlines = int(os.stat(rmg).st_size/8./width)

    with open(rmg,'rb') as src, open(cpx,'wb') as dst:
        for _ in range(nlines):
            rmg_line = struct.unpack(line_fmt, src.read(samples_per_line*4))
            out_line = [0.0]*samples_per_line
            # Interleave: even slots take the amplitude band, odd slots
            # take the correlation band.
            out_line[0::2] = rmg_line[:ncols]
            out_line[1::2] = rmg_line[ncols:]
            dst.write(struct.pack(line_fmt, *out_line))
    return
|
||||
|
||||
def cmplx_to_rmg(ifg1,rmg,width):
    """
    De-interleave a complex-layout file into RMG band order.

    Each input line holds width (re, im)-style float32 pairs; each output
    line holds all first-of-pair values followed by all second-of-pair
    values.  No arithmetic is performed on the sample values.

    @param ifg1 input complex-layout file name
    @param rmg output RMG file name
    @param width number of samples per line
    """
    import struct
    import math

    ncols = int(width)
    samples_per_line = 2*ncols
    line_fmt = "%df" % (samples_per_line,)
    # 8 bytes per pixel per line (one float32 pair).
    nlines = int(os.stat(ifg1).st_size/8./width)

    with open(ifg1,'rb') as src, open(rmg,'wb') as dst:
        for _ in range(nlines):
            pair_line = struct.unpack(line_fmt, src.read(samples_per_line*4))
            # First band <- even-indexed samples, second band <- odd-indexed.
            deinterleaved = list(pair_line[0::2]) + list(pair_line[1::2])
            dst.write(struct.pack(line_fmt, *deinterleaved))
    return
|
||||
|
||||
|
||||
|
||||
# Script entry point: instantiate the Dpm application defined above and run
# its full processing chain.
if __name__ == "__main__":
    dpm = Dpm()
    dpm.run()
|
||||
|
||||
|
|
@ -0,0 +1,68 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Walter Szeliga
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
import datetime
|
||||
from isce import logging
|
||||
from iscesys.Compatibility import Compatibility
|
||||
Compatibility.checkPythonVersion()
|
||||
from iscesys.Component.FactoryInit import FactoryInit
|
||||
|
||||
class extractHDROrbit(FactoryInit):
    """
    Application that parses a sensor's image metadata and prints each orbit
    state vector as: epoch (seconds of day), position (x, y, z) and
    velocity (vx, vy, vz), one line per state vector.
    """

    def main(self):
        # Parse the image metadata and extract the image
        self.logger.info('Parsing image metadata')
        self.sensorObj.parse()
        frame = self.sensorObj.getFrame()
        for sv in frame.getOrbit():
            epoch = self.datetimeToEpoch(sv.getTime())
            (x,y,z) = sv.getPosition()
            (vx,vy,vz) = sv.getVelocity()
            print(epoch,x,y,z,vx,vy,vz)

    def datetimeToEpoch(self,dt):
        """
        Convert a datetime to seconds since midnight of the same day.

        Fix: include the fractional (microsecond) part, which was previously
        truncated -- state-vector epochs are not generally on whole seconds.
        Whole-second inputs still return an int, exactly as before.
        """
        epoch = dt.hour*60*60 + dt.minute*60 + dt.second
        if dt.microsecond:
            epoch += dt.microsecond/1.0e6
        return epoch

    def __init__(self,arglist):
        # Initialize the component factory from the command-line argument
        # list, then fetch the configured Sensor component.
        FactoryInit.__init__(self)
        self.initFactory(arglist)
        self.sensorObj = self.getComponent('Sensor')
        self.logger = logging.getLogger('isce.extractHDROrbits')
|
||||
|
||||
# Script entry point: requires at least one argument (the XML parameter
# file) which is handed to the factory initializer.
if __name__ == "__main__":
    import sys
    if (len(sys.argv) < 2):
        print("Usage:%s <xml-parameter file>" % sys.argv[0])
        sys.exit(1)
    runObj = extractHDROrbit(sys.argv[1:])
    runObj.main()
|
||||
|
|
@ -0,0 +1,52 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
|
||||
import os
|
||||
import argparse
|
||||
import glob
|
||||
import isce
|
||||
import isceobj
|
||||
from isceobj.Util.ImageUtil import ImageLib as IML
|
||||
|
||||
|
||||
def cmdLineParse(iargs=None):
    '''
    Command line parser.

    @param iargs Optional list of argument strings to parse instead of
                 sys.argv[1:] (lets the parser be driven from Python code
                 and tests); default None preserves the old behavior.
    @return argparse.Namespace with attributes infile, full, base.
    '''

    parser = argparse.ArgumentParser(description='Fixes pathnames in ISCE image XML files. Can be used to do more things in the future.')
    parser.add_argument('-i', '--input', type=str, nargs='+', required=True, dest='infile',
            help = 'Input image for which the XML file needs to be fixed.')

    # Exactly one of --full / --base must be chosen.
    fname = parser.add_mutually_exclusive_group(required=True)
    fname.add_argument('-f', '--full', action='store_true',
            help = 'Replace filename with full path including dir in which file is located')
    fname.add_argument('-b', '--base', action='store_true',
            help = 'Replace filename with basename to use in current directory')

    inps = parser.parse_args(args=iargs)
    return inps
|
||||
|
||||
|
||||
if __name__ == '__main__':
    '''
    Main driver.
    '''
    inps = cmdLineParse()

    for fname in inps.infile:
        # Operate on the data file itself; strip a trailing .xml if the
        # user pointed at the metadata file instead of the image.
        if fname.endswith('.xml'):
            fname = os.path.splitext(fname)[0]
        print('fixing xml file path for file: {}'.format(fname))

        if inps.full:
            # Rebuild an absolute path (equivalent to os.path.abspath(fname)).
            fdir = os.path.dirname(fname)
            fname = os.path.abspath(os.path.join(fdir, os.path.basename(fname)))
        else:
            # NOTE(review): the nested basename call is redundant --
            # basename(basename(x)) == basename(x).
            fname = os.path.basename(os.path.basename(fname))

        # Reload the image metadata and re-render the XML header so it
        # records the chosen path.
        img = IML.loadImage(fname)[0]
        img.filename = fname
        img.setAccessMode('READ')
        img.renderHdr()
|
||||
|
||||
|
|
@ -0,0 +1,324 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Walter Szeliga
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
import math
|
||||
from isce import logging
|
||||
import isceobj
|
||||
from iscesys.Component.FactoryInit import FactoryInit
|
||||
from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU
|
||||
|
||||
class Focuser(object):
    """
    Drives focusing of a raw SAR acquisition into an SLC using the ISCE
    stdproc components (pulsetiming, orbit2sch, fdmocomp, formslc).
    """

    def __init__(self,rawObj=None):
        # rawObj: the sensor object describing the unfocused (raw) data.
        self.rawObj = rawObj
        self.logger = logging.getLogger('isce.focus')

    def focuser(self):
        """
        Create a make_raw object and then focus it!
        """
        doppler = isceobj.Doppler.useDOPIQ()
        hhRaw = self.make_raw(self.rawObj,doppler)
        fd = hhRaw.getDopplerValues().getDopplerCoefficients(inHz=False)
        # Hard-wire the doppler for point-target analysis
        # (the estimate computed above is deliberately overwritten).
        # C-band point target Doppler
        fd = [0.0163810952106773,-0.0000382864254695,0.0000000012335234,0.0]
        # L-band point target Doppler
        #fd = [0.0700103587387314, 0.0000030023105646, -0.0000000000629754, 0.0]
        self.focus(hhRaw,fd)

    def make_raw(self,sensor,doppler):
        """
        Extract the unfocused SAR image and associated data

        @param sensor (\a isceobj.Sensor) the sensor object
        @param doppler (\a isceobj.Doppler) the doppler object
        @return (\a make_raw) a make_raw instance
        """
        from make_raw import make_raw
        import stdproc
        import isceobj

        # Extract raw image
        self.logger.info("Creating Raw Image")
        mr = make_raw()
        mr.wireInputPort(name='sensor',object=sensor)
        mr.wireInputPort(name='doppler',object=doppler)
        mr.make_raw()

        return mr

    def focus(self,mr,fd):
        """
        Focus SAR data

        @param mr (\a make_raw) a make_raw instance
        @param fd (\a float) Doppler centroid for focusing
        """
        import stdproc
        import isceobj
        #from isceobj.Sensor.Generic import Generic

        # Extract some useful variables
        frame = mr.getFrame()
        orbit = frame.getOrbit()
        planet = frame.getInstrument().getPlatform().getPlanet()

        # Calculate Peg Point
        self.logger.info("Calculating Peg Point")
        peg = self.calculatePegPoint(frame,orbit,planet)
        # V: SCH S-velocity at mid-swath; H: satellite height at mid-swath.
        V,H = self.calculateProcessingVelocity(frame,peg)

        # Interpolate orbit
        self.logger.info("Interpolating Orbit")
        pt = stdproc.createPulsetiming()
        pt.wireInputPort(name='frame',object=frame)
        pt.pulsetiming()
        orbit = pt.getOrbit()

        # Convert orbit to SCH coordinates
        self.logger.info("Converting orbit reference frame")
        o2s = stdproc.createOrbit2sch()
        o2s.wireInputPort(name='planet',object=planet)
        o2s.wireInputPort(name='orbit',object=orbit)
        o2s.wireInputPort(name='peg',object=peg)
        o2s.setAverageHeight(H)
        o2s.orbit2sch()

        # Create Raw Image
        rawImage = isceobj.createRawImage()
        filename = frame.getImage().getFilename()
        bytesPerLine = frame.getImage().getXmax()
        goodBytes = bytesPerLine - frame.getImage().getXmin()
        rawImage.setAccessMode('read')
        rawImage.setByteOrder(frame.getImage().byteOrder)
        rawImage.setFilename(filename)
        rawImage.setNumberGoodBytes(goodBytes)
        rawImage.setWidth(bytesPerLine)
        rawImage.setXmin(frame.getImage().getXmin())
        rawImage.setXmax(bytesPerLine)
        rawImage.createImage()

        # Create SLC Image
        slcImage = isceobj.createSlcImage()
        rangeSamplingRate = frame.getInstrument().getRangeSamplingRate()
        rangePulseDuration = frame.getInstrument().getPulseLength()
        chirpSize = int(rangeSamplingRate*rangePulseDuration)
        chirpExtension = 0 #0.5*chirpSize
        # goodBytes/2 range samples per line (2 bytes per sample); the chirp
        # length is removed from the valid range extent.
        numberRangeBins = int(goodBytes/2) - chirpSize + chirpExtension
        slcImage.setFilename(filename.replace('.raw','.slc'))
        slcImage.setByteOrder(frame.getImage().byteOrder)
        slcImage.setAccessMode('write')
        slcImage.setDataType('CFLOAT')
        slcImage.setWidth(numberRangeBins)
        slcImage.createImage()

        # Calculate motion compenstation correction for Doppler centroid
        self.logger.info("Correcting Doppler centroid for motion compensation")
        fdmocomp = stdproc.createFdMocomp()
        fdmocomp.wireInputPort(name='frame',object=frame)
        fdmocomp.wireInputPort(name='peg',object=peg)
        fdmocomp.wireInputPort(name='orbit',object=o2s.getOrbit())
        fdmocomp.setWidth(numberRangeBins)
        fdmocomp.setSatelliteHeight(H)
        fdmocomp.setDopplerCoefficients([fd[0],0.0,0.0,0.0])
        fdmocomp.fdmocomp()
        # Only the constant Doppler term is updated in place.
        fd[0] = fdmocomp.getDopplerCentroid()
        self.logger.info("Updated Doppler centroid: %s" % (fd))

        # Calculate the motion compensation Doppler centroid correction plus rate
        #self.logger.info("Testing new Doppler code")
        #frate = stdproc.createFRate()
        #frate.wireInputPort(name='frame',object=frame)
        #frate.wireInputPort(name='peg', object=peg)
        #frate.wireInputPort(name='orbit',object=o2s.getOrbit())
        #frate.wireInputPort(name='planet',object=planet)
        #frate.setWidth(numberRangeBins)
        #frate.frate()
        #fd = frate.getDopplerCentroid()
        #fdrate = frate.getDopplerRate()
        #self.logger.info("Updated Doppler centroid and rate: %s %s" % (fd,fdrate))

        synthetic_aperature_length = self._calculateSyntheticAperatureLength(frame,V)

        # Patch size is the next power of two above twice the synthetic
        # aperture; the remainder of each patch is the fully-focused part.
        patchSize = self.nextpow2(2*synthetic_aperature_length)
        valid_az_samples = patchSize - synthetic_aperature_length
        rawFileSize = rawImage.getLength()*rawImage.getWidth()
        linelength = rawImage.getXmax()
        overhead = patchSize - valid_az_samples
        numPatches = (1+int((rawFileSize/float(linelength)-overhead)/valid_az_samples))

        # Focus image
        self.logger.info("Focusing image")
        focus = stdproc.createFormSLC()
        focus.wireInputPort(name='rawImage',object=rawImage)
        focus.wireInputPort(name='slcImage',object=slcImage)
        focus.wireInputPort(name='orbit',object=o2s.getOrbit())
        focus.wireInputPort(name='frame',object=frame)
        focus.wireInputPort(name='peg',object=peg)
        focus.wireInputPort(name='planet',object=planet)
        focus.setDebugFlag(96)
        focus.setBodyFixedVelocity(V)
        focus.setSpacecraftHeight(H)
        focus.setAzimuthPatchSize(patchSize)
        focus.setNumberValidPulses(valid_az_samples)
        focus.setSecondaryRangeMigrationFlag('n')
        focus.setNumberAzimuthLooks(1)
        focus.setNumberPatches(numPatches)
        focus.setDopplerCentroidCoefficients(fd)
        #focus.setDopplerCentroidCoefficients([fd[0], 0.0, 0.0])
        focus.formslc()
        # Dump the motion-compensation positions for offline inspection.
        mocompPos = focus.getMocompPosition()
        fp = open('position.sch','w')
        for i in range(len(mocompPos[0])):
            fp.write("%f %f\n" % (mocompPos[0][i],mocompPos[1][i]))
        fp.close()

        slcImage.finalizeImage()
        rawImage.finalizeImage()

        # Recreate the SLC image (reopened read-only to query its size)
        slcImage = isceobj.createSlcImage()
        slcImage.setFilename(filename.replace('.raw','.slc'))
        slcImage.setAccessMode('read')
        slcImage.setDataType('CFLOAT')
        slcImage.setWidth(numberRangeBins)
        slcImage.createImage()
        width = int(slcImage.getWidth())
        length = int(slcImage.getLength())

        # Create a frame object and write it out using the Generic driver
        frame.setImage(slcImage)
        frame.setOrbit(o2s.getOrbit())
        #writer = Generic()
        #writer.frame = frame
        #writer.write('test.h5',compression='gzip')

        slcImage.finalizeImage()

        # Publish the focused image dimensions on the instance.
        self.width = width
        self.length = length

    def calculateProcessingVelocity(self,frame,peg):
        """
        Calculate the optimal processing velocity and height from the orbit.

        @param frame (\a isceobj.Scene.Frame) the Frame object describing the unfocused SAR data
        @param peg (\a isceobj.Location.Peg) a Peg point object defining the origin of the SCH coordinate system
        @return (\a tuple) the processing velocity and satellite height
        """
        from isceobj.Location.SCH import SCH

        orbit = frame.getOrbit()
        ellipsoid = frame.getInstrument().getPlatform().getPlanet().get_elp()

        # Get the mid point of the orbit
        midxyz = orbit.interpolateOrbit(frame.getSensingMid())
        midllh = ellipsoid.xyz_to_llh(midxyz.getPosition())
        # Calculate the SCH S-velocity
        sch = SCH(peg=peg)
        midsch = sch.xyz_to_sch(midxyz.getPosition())
        midvsch = sch.vxyz_to_vsch(midsch,midxyz.getVelocity())
        self.logger.debug("XYZ Velocity: %s" % (midxyz.getVelocity()))
        self.logger.debug("SCH Velocity: %s" % (midvsch))
        H = midllh[2] # The height at midswath
        V = midvsch[0] # SCH S-velocity at midswath
        self.logger.debug("Satellite Height: %s" % (H))
        return V,H

    def calculatePegPoint(self,frame,orbit,planet):
        """
        Calculate the peg point used as the origin of the SCH coordinate system during focusing.

        @param frame (\a isceobj.Scene.Frame) the Frame object describing the unfocused SAR data
        @param orbit (\a isceobj.Orbit.Orbit) the orbit along which to calculate the peg point
        @param planet (\a isceobj.Planet.Planet) the planet around which the satellite is orbiting
        @return (\a isceobj.Location.Peg) the peg point
        """
        from isceobj.Location.Peg import PegFactory
        from isceobj.Location.Coordinate import Coordinate

        # First, get the orbit nadir location at mid-swath and the end of the scene
        midxyz = orbit.interpolateOrbit(frame.getSensingMid())
        endxyz = orbit.interpolateOrbit(frame.getSensingStop())
        # Next, calculate the satellite heading from the mid-point to the end of the scene
        ellipsoid = planet.get_elp()
        midllh = ellipsoid.xyz_to_llh(midxyz.getPosition())
        endllh = ellipsoid.xyz_to_llh(endxyz.getPosition())
        heading = math.degrees(ellipsoid.geo_hdg(midllh,endllh))
        # Then create a peg point from this data
        # (placed at the mid-swath nadir point, at zero height).
        coord = Coordinate(latitude=midllh[0],longitude=midllh[1],height=0.0)
        peg = PegFactory.fromEllipsoid(coordinate=coord,heading=heading,ellipsoid=ellipsoid)
        self.logger.debug("Peg Point: %s" % (peg))
        return peg

    def _calculateSyntheticAperatureLength(self,frame,v):
        """
        Calculate the length of the synthetic aperature in pixels.

        @param frame (\a isceobj.Scene.Frame) the Frame object describing the unfocussed SAR data
        @param v (\a float) the processing velocity
        @return (\a int) the synthetic aperture length in azimuth lines
        """
        wavelength = frame.getInstrument().getRadarWavelength()
        prf = frame.getInstrument().getPulseRepetitionFrequency()
        L = frame.getInstrument().getPlatform().getAntennaLength()
        farRange = frame.getFarRange()

        # L_sa = wavelength * range * PRF / (antenna length * velocity),
        # rounded to the nearest whole line.
        syntheticAperatureLength = int(round((wavelength*farRange*prf)/(L*v),0))

        return syntheticAperatureLength

    def nextpow2(self,v):
        """
        Return the smallest power of two >= v (bit-smearing trick; the
        shifts cover 32 bits, so v is assumed to fit in 32 bits).
        """
        v = v-1
        v |= v >> 1;
        v |= v >> 2;
        v |= v >> 4;
        v |= v >> 8;
        v |= v >> 16;
        v = v+1
        return v
|
||||
|
||||
def main():
    """
    Command-line driver: initialize components from the XML file given as
    the first argument and focus the configured 'Reference' raw image.
    """
    import sys
    import isceobj

    factory = FactoryInit()
    factory.fileInit = sys.argv[1]
    factory.defaultInitModule = 'InitFromXmlFile'
    factory.initComponentFromFile()

    rawSensor = factory.getComponent('Reference')

    Focuser(rawObj=rawSensor).focuser()
|
||||
|
||||
# Script entry point.
if __name__ == "__main__":
    main()
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2011 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
from iscesys.Compatibility import Compatibility
|
||||
Compatibility.checkPythonVersion()
|
||||
|
||||
from iscesys.Component.FactoryInit import FactoryInit
|
||||
class FormSLCApp(FactoryInit):
    """
    Stand-alone application that runs the 'FormSlc' component over a raw
    image to produce an SLC, with all components configured through
    FactoryInit from the command-line argument list.
    """

    def main(self):
        # Run SLC formation over the raw image.
        self.objFormSlc.formSLCImage(self.objRaw,self.objSlc)
        # NOTE(review): the focusing is executed a second time on purpose
        # (see the 'second time' marker) -- presumably to exercise
        # re-runnability of the component; confirm before removing.
        print('second time')
        self.objFormSlc.formSLCImage(self.objRaw,self.objSlc)
        # Release the image resources opened in __init__.
        self.objSlc.finalizeImage()
        self.objRaw.finalizeImage()
        return

    def __init__(self, arglist):
        # Initialize the component factory from the argument list, then
        # instantiate and open the SLC/raw images and the FormSlc component.
        FactoryInit.__init__(self)
        self.initFactory(arglist)
        self.objSlc = self.getComponent('SlcImage')
        self.objSlc.createImage()
        self.objRaw = self.getComponent('RawImage')
        self.objRaw.createImage()
        self.objFormSlc = self.getComponent('FormSlc')
        return
|
||||
|
||||
# Script entry point: all command-line arguments are handed to the
# FactoryInit-based constructor.
if __name__ == "__main__":
    import sys
    runObj = FormSLCApp(sys.argv[1:])
    runObj.main()
|
||||
|
||||
|
|
@ -0,0 +1,140 @@
|
|||
#!/usr/bin/env python3
|
||||
#
|
||||
# Author: Bekaert David
|
||||
# Year: 2017
|
||||
|
||||
import os
|
||||
import sys
|
||||
import argparse
|
||||
from osgeo import gdal
|
||||
|
||||
import isce
|
||||
import isceobj
|
||||
|
||||
|
||||
# command line parsing of input file
|
||||
def cmdLineParse(iargs=None):
    '''
    Command line parser.

    @param iargs Optional list of argument strings to parse instead of
                 sys.argv[1:] (lets the parser be driven from Python code
                 and tests); default None preserves the old behavior.
    @return argparse.Namespace with attribute fname.
    '''
    parser = argparse.ArgumentParser(description='Generate ISCE xml from gdal products')
    parser.add_argument('-i','--input', dest='fname', type=str, required=True, help='Input filename (GDAL supported)')
    return parser.parse_args(args=iargs)
|
||||
|
||||
|
||||
def gdal2isce_xml(fname):
    """
    Generate ISCE xml file from gdal supported file

    Example: import isce
             from applications.gdal2isce_xml import gdal2isce_xml
             xml_file = gdal2isce_xml(fname+'.vrt')

    @param fname input filename (any GDAL-supported raster; a '.vrt'
                 extension is stripped for the output name)
    @return path of the ISCE xml file that was written (outname + '.xml')
    """

    # open the GDAL file and get typical data informationi
    # Mapping from GDAL band datatype codes to ISCE datatype strings.
    # NOTE(review): values mix ISCE-style ('BYTE', 'CFLOAT') and
    # numpy-style ('uint16', 'complex128') names -- confirm downstream
    # consumers accept both spellings.
    GDAL2ISCE_DATATYPE = {
       1 : 'BYTE',
       2 : 'uint16',
       3 : 'SHORT',
       4 : 'uint32',
       5 : 'INT',
       6 : 'FLOAT',
       7 : 'DOUBLE',
       10: 'CFLOAT',
       11: 'complex128',
    }
    # GDAL2NUMPY_DATATYPE = {
    # 1 : np.uint8,
    # 2 : np.uint16,
    # 3 : np.int16,
    # 4 : np.uint32,
    # 5 : np.int32,
    # 6 : np.float32,
    # 7 : np.float64,
    # 10: np.complex64,
    # 11: np.complex128,
    # }

    # check if the input file is a vrt; if so, the xml describes the
    # underlying file (the .vrt extension is dropped).
    fbase, fext = os.path.splitext(fname)
    print(fext)
    if fext == ".vrt":
        outname = fbase
    else:
        outname = fname
    print(outname)

    # open the GDAL file and get typical ds information
    # NOTE(review): the dataset handle is never explicitly released
    # (ds = None) -- relies on garbage collection.
    ds = gdal.Open(fname, gdal.GA_ReadOnly)
    width = ds.RasterXSize
    length = ds.RasterYSize
    bands = ds.RasterCount
    print("width: " + "\t" + str(width))
    print("length: " + "\t" + str(length))
    print("num of bands:" + "\t" + str(bands))

    # getting the datatype information from the first band (all bands are
    # assumed to share one datatype)
    raster = ds.GetRasterBand(1)
    dataTypeGdal = raster.DataType

    # user look-up dictionary from gdal to isce format
    dataType= GDAL2ISCE_DATATYPE[dataTypeGdal]
    print("dataType: " + "\t" + str(dataType))

    # transformation contains gridcorners (lines/pixels or lonlat and the spacing 1/-1 or deltalon/deltalat)
    transform = ds.GetGeoTransform()
    # if a complex data type, then create complex image
    # if a real data type, then create a regular image

    img = isceobj.createImage()
    img.setFilename(os.path.abspath(outname))
    img.setWidth(width)
    img.setLength(length)
    img.setAccessMode('READ')
    img.bands = bands
    img.dataType = dataType

    # interleave: map GDAL's INTERLEAVE metadata onto ISCE scheme names.
    md = ds.GetMetadata('IMAGE_STRUCTURE')
    sch = md.get('INTERLEAVE', None)
    if sch == 'LINE':
        img.scheme = 'BIL'
    elif sch == 'PIXEL':
        img.scheme = 'BIP'
    elif sch == 'BAND':
        img.scheme = 'BSQ'
    else:
        # Fall back to a sensible default when GDAL reports no scheme.
        print('Unrecognized interleaving scheme, {}'.format(sch))
        if bands < 2:
            print('Assuming default, BIP')
            img.scheme = 'BIP'
        else:
            print('Assuming default, BSQ')
            img.scheme = 'BSQ'

    # GDAL geotransform: [0]=top-left lon, [1]=lon spacing,
    # [3]=top-left lat, [5]=lat spacing.
    img.firstLongitude = transform[0]
    img.firstLatitude = transform[3]
    img.deltaLatitude = transform[5]
    img.deltaLongitude = transform[1]

    xml_file = outname + ".xml"
    img.dump(xml_file)

    return xml_file
|
||||
|
||||
|
||||
# main script
if __name__ == '__main__':
    '''
    Main driver.
    '''

    # Parse command line
    inps = cmdLineParse()

    # check if the input file exist before handing it to GDAL
    if not os.path.isfile(inps.fname):
        raise Exception('Input file is not found ....')

    gdal2isce_xml(inps.fname)
|
||||
|
||||
|
|
@ -0,0 +1,451 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Piyush Agram
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import argparse
|
||||
import symtable
|
||||
import math
|
||||
import numpy as np
|
||||
import os
|
||||
import sys
|
||||
|
||||
import isce
|
||||
from isceobj.Util.ImageUtil import ImageLib as IML
|
||||
|
||||
#####Global parameters
|
||||
# Module-level state shared by the imageMath helpers: populated while the
# command line is parsed, then consumed when the band equations are
# evaluated and the output is written.
iMath = {
        'outFile' : None, ####Output file name
        'outBands' : [], ####List of out band mmaps
        'outScheme' : 'BSQ', ####Output scheme
        'equations' : [], #####List of math equations
        'outType' : 'f', ####Output datatype
        'width' : None, ####Width of images
        'length' : None, ####Length of images
        'inBands' : {}, ####Dictionary of input band mmaps
        'inFiles' : {}, ####Dictionary input file mmaps
        'bboxes' : [] ####Bounding boxes for input mmaps
        }
|
||||
|
||||
|
||||
helpStr = """
|
||||
|
||||
ISCE Band image with imageMath.py
|
||||
|
||||
Examples:
|
||||
*********
|
||||
|
||||
1) imageMath.py -e='a*exp(-1.0*J*arg(b))' -o test.int -t cfloat --a=resampOnlyImage.int --b=topophase.mph
|
||||
This uses phase from topophase.mph to correct topophase from the interferograms
|
||||
|
||||
2) imageMath.py -e='a_0;a_1' --a=resampOnlyImage.amp -o test.amp -s BIL
|
||||
This converts a BIP image to a BIL image
|
||||
|
||||
3) imageMath.py -e="abs(a);sqrt(b_0**2 + b_1**2)" --a=topophase.flat --b="topophase.mph;3419;float;2;BIP" -o test.mag -s BIL
|
||||
This should produce a BIL (RMG) image where both channels are equal. Input the correct width before testing this.
|
||||
|
||||
Rules:
|
||||
******
|
||||
|
||||
0) Input math expressions should be valid python expressions.
|
||||
|
||||
1) A math expression for every band of output image is needed. For a multi-band output image, these expressions are separated by a ;.
|
||||
Example: See Example 2 above.
|
||||
|
||||
2) All variable names in the math expressions need to be lower case, single character. Capital characters and multi-char names are reserved for constants and functions respectively.
|
||||
|
||||
3) The band of multi-band input images are represented by adding _i to the variable name, where "i" is the band number. All indices are zero-based (C and python).
|
||||
Example : a_0 represents the first band of the image represented by variable "a".
|
||||
|
||||
4) For a single band image, the _0 band notation is optional.
|
||||
Example: a_0 and a are equivalent for a single band image.
|
||||
|
||||
5) For every lower case variable in the equations, another input "--varname" is needed. Example shown above where --a and --b are defined.
|
||||
|
||||
6) Variables can be defined in two ways:
|
||||
a) File name (assuming an ISCE .xml file also exists).
|
||||
Example --a=resamp.int
|
||||
|
||||
b) Image grammar: "Filename;width;datatype;bands;scheme"
|
||||
Example --a="resamp.int;3200;cfloat;1;BSQ"
|
||||
|
||||
- Default value for datatype=float
|
||||
- Default value for bands = 1
|
||||
- Default value for scheme = BSQ
|
||||
|
||||
c) In the image grammar: Single character codes for datatypes are case sensitive (Numpy convention) whereas multi-character codes are case-insensitive. Internally, everything is translated to numpy convention by the code before processing.
|
||||
"""
|
||||
|
||||
|
||||
class NumericStringParser(object):
    '''
    Parse the input expression using Python's inbuilt parser.
    '''

    def __init__(self, num_string):
        '''
        Create a parser object with input string.

        Parameters
        ----------
        num_string : str
            Python math expression to analyze.
        '''
        self.string = num_string
        # Names reserved for functions / constants; any other identifier in an
        # expression is treated as an input band variable.
        self._restricted = list(IML.fnDict.keys()) + list(IML.constDict.keys())

    def parse(self):
        '''
        Parse the input expression to get list of identifiers.

        Returns
        -------
        tuple of (list, list)
            (unknown, known): band-variable names vs. reserved
            function / constant names used by the expression.

        Raises
        ------
        IOError
            If the expression is not valid python, or a band variable
            violates the single-character lower-case convention.
        '''
        try:
            symTable = symtable.symtable(self.string, 'string', 'eval')
        except (SyntaxError, ValueError) as err:
            # Narrowed from a bare "except:": an invalid expression raises
            # SyntaxError (ValueError for null bytes); a bare except would
            # also swallow KeyboardInterrupt / SystemExit.
            raise IOError('Not a valid python math expression \n' +
                    self.string) from err

        idents = symTable.get_identifiers()

        known = []
        unknown = []
        for ident in idents:
            if ident not in self._restricted:
                unknown.append(ident)
            else:
                known.append(ident)

        for val in unknown:
            # The part before '_' is the variable; a numeric suffix selects a band.
            band = val.split('_')[0]
            if len(band) != 1:
                raise IOError('Multi character variables in input expressions represent functions or constants. Unknown function or constant : %s'%(val))

            elif (band.lower() != band):
                raise IOError('Single character upper case letters are used for constant. No available constant named %s'%(val))

        return unknown, known
|
||||
|
||||
#######Command line parsing
|
||||
def detailedHelp():
    '''
    Return the detailed help message.

    Combines the module help string with the lists of available
    functions, constants, and datatype -> numpy code mappings.
    '''
    pieces = [
        helpStr, '\n\n',
        'Available Functions \n',
        '********************\n',
        str(IML.fnDict.keys()), '\n\n',
        'Available Constants \n',
        '********************\n',
        str(IML.constDict.keys()), '\n\n',
        'Available DataTypes -> numpy code mapping \n',
        '***************************************** \n',
        IML.printNUMPYMap(), '\n',
    ]
    return ''.join(pieces)
|
||||
|
||||
class customArgparseAction(argparse.Action):
    """Argparse action for -H/--hh: print extended help, then usage, then exit."""

    def __call__(self, parser, args, values, option_string=None):
        '''
        The action to be performed.
        '''
        # Long-form help first, followed by argparse's own option summary.
        full_text = detailedHelp()
        print(full_text)
        parser.print_help()
        parser.exit()
|
||||
|
||||
def firstPassCommandLine():
    '''
    Take a first parse at command line parsing.
    Read only the basic required fields

    Returns
    -------
    tuple of (argparse.Namespace, list)
        Parsed basic options, and the unconsumed tokens holding the
        per-variable file definitions (--a, --b, ...).
    '''
    # Generic parser: only the equation and the output description are handled
    # here; per-variable inputs are parsed later by parseInputFile().
    cmdline = argparse.ArgumentParser(description='ISCE Band math calculator.',
                                      formatter_class=IML.customArgparseFormatter)

    cmdline.add_argument('-H', '--hh', nargs=0, action=customArgparseAction,
                         help='Display detailed help information.')
    cmdline.add_argument('-e', '--eval', type=str, required=True, action='store',
                         help='Expression to evaluate.', dest='equation')
    cmdline.add_argument('-o', '--out', type=str, default=None, action='store',
                         help='Name of the output file', dest='out')
    cmdline.add_argument('-s', '--scheme', type=str, default='BSQ', action='store',
                         help='Output file format.', dest='scheme')
    cmdline.add_argument('-t', '--type', type=str, default='float', action='store',
                         help='Output data type.', dest='dtype')
    cmdline.add_argument('-d', '--debug', action='store_true', default=False,
                         help='Print debugging statements', dest='debug')
    cmdline.add_argument('-n', '--noxml', action='store_true', default=False,
                         help='Do not create an ISCE XML file for the output.', dest='noxml')

    # Equation / output options first; everything unrecognized is a file spec.
    basic, extras = cmdline.parse_known_args()

    # Validate the interleaving scheme before doing any work.
    scheme = basic.scheme.upper()
    if scheme not in ['BSQ', 'BIL', 'BIP']:
        raise IOError('Unknown output scheme: %s'%(basic.scheme))
    iMath['outScheme'] = scheme

    # Normalize the requested output datatype to its numpy equivalent.
    iMath['outType'] = IML.NUMPY_type(basic.dtype)

    return basic, extras
|
||||
|
||||
|
||||
def parseInputFile(varname, args):
    '''
    Get the input string corresponding to given variable name.

    Parameters
    ----------
    varname : str
        Single lower-case character naming the variable (e.g. 'a').
    args : list of str
        Remaining command line tokens to search for '--<varname>'.

    Returns
    -------
    tuple of (str, list)
        The input string bound to the variable, and the still-unconsumed tokens.

    Raises
    ------
    IOError
        If the variable name is not a single lower-case character.
    SyntaxError
        If '--<varname>' was not supplied on the command line.
    '''
    inarg = varname.strip()
    ####Keyname corresponds to specific variable
    key = '--' + inarg

    if len(inarg) > 1:
        raise IOError('Input variable names should be single characters.\n' +
                'Invalid variable name: %s'%varname)

    if (inarg != inarg.lower()):
        # Fixed typo in the user-facing message ('Invalud' -> 'Invalid').
        raise IOError('Input variable names should be lower case. \n' +
                'Invalid variable name: %s'%varname)

    #####Create a simple parser
    parser = IML.customArgumentParser(description='Parser for band math.',
            add_help=False)
    parser.add_argument(key, type=str, required=True, action='store',
            help='Input string for a particular variable.', dest='instr')

    try:
        infile, rest = parser.parse_known_args(args)
    except (SystemExit, Exception) as err:
        # argparse reports a missing required option by raising SystemExit
        # (which a plain "except Exception" would NOT catch); narrowed from a
        # bare except and chained so the original cause is preserved.
        raise SyntaxError('Input file : "%s" not defined on command line'%varname) from err
    return infile.instr, rest
|
||||
|
||||
|
||||
def createNamespace():
    '''
    Handy utility if you want to use imageMath.py from within other python code.

    Returns an argparse.Namespace pre-populated with the same defaults that
    firstPassCommandLine() would produce with no options given.
    '''
    from argparse import Namespace
    defaults = {
        'debug': False,
        'dtype': 'float',
        'equation': None,
        'hh': None,
        'noxml': False,
        'out': None,
        'scheme': None,
    }
    return Namespace(**defaults)
|
||||
|
||||
def mergeBbox(inlist):
    '''
    Merge Bboxes of input files.

    Returns the first bbox (as a numpy array) when every bbox in the list
    agrees with it to within 1e-5 per component; returns None for an empty
    list or when any bbox disagrees.
    '''
    if not inlist:
        return None

    reference = np.array(inlist[0])

    # Largest per-component deviation of any bbox from the reference.
    deviation = np.abs(np.array(inlist) - reference[None, :]).max(axis=0)

    if np.any(deviation > 1.0e-5):
        print('Bounding boxes dont match. Not adding bbox info.')
        return None

    return reference
|
||||
|
||||
#######The main driver that puts everything together
|
||||
def main(args, files):
    '''
    Main driver for band math: parse the expressions, memory-map the inputs,
    evaluate every expression line-by-line and write the output image.

    Parameters
    ----------
    args : argparse.Namespace
        Basic options from firstPassCommandLine() / createNamespace().
    files : list or argparse.Namespace
        Per-variable file definitions: either the leftover command line
        tokens (list) or a Namespace with one attribute per variable.
    '''
    #######Set up logger appropriately
    logger = IML.createLogger(args.debug, name='imageMath')
    logger.debug('Known: '+ str(args))
    logger.debug('Optional: '+ str(files))

    #######Determine number of input and output bands
    bandList = []
    iMath['equations'] = []
    for ii,expr in enumerate(args.equation.split(';')):

        #####Now parse the equation to get the file names used
        nsp = NumericStringParser(expr.strip())
        logger.debug('Input Expression: %d : %s'%(ii, expr))
        bands, known = nsp.parse()
        logger.debug('Unknown variables: ' + str(bands))
        logger.debug('Known variables: ' + str(known))

        iMath['equations'].append(expr)
        bandList = bandList + bands

    bandList = IML.uniqueList(bandList)

    numOutBands = len(iMath['equations'])
    logger.debug('Number of output bands = %d'%(numOutBands))
    logger.debug('Number of input bands used = %d'%(len(bandList)))
    logger.debug('Input bands used = ' + str(bandList))

    #####Determine unique images from the bandList
    fileList = IML.bandsToFiles(bandList, logger)

    ######Create input memmaps
    for ii,infile in enumerate(fileList):
        if type(files) == list:
            fstr, files = parseInputFile(infile, files)
        else:
            fstr = getattr(files, infile)

        logger.debug('Input string for File %d: %s: %s'%(ii, infile, fstr))

        ####A ';' in the definition means the full image-grammar description
        if len(fstr.split(';')) > 1:
            fmap = IML.mmapFromStr(fstr, logger)
            bbox = None
        else:
            fmap = IML.mmapFromISCE(fstr, logger)
            bbox = IML.getGeoInfo(fstr)

        iMath['inFiles'][infile] = fmap

        ####For single band images the bare variable name aliases band 0
        if len(fmap.bands) == 1:
            iMath['inBands'][infile] = fmap.bands[0]

        ####Fix: loop index renamed from 'ii' to 'bb' so it no longer shadows
        ####the enumerate counter used in the debug message above.
        for bb in range(len(fmap.bands)):
            iMath['inBands']['%s_%d'%(infile, bb)] = fmap.bands[bb]

        if bbox is not None:
            iMath['bboxes'].append(bbox)

    if type(files) == list:
        if len(files):
            raise IOError('Unused input variables set:\n'+ ' '.join(files))

    #######Some debugging
    logger.debug('List of available bands: ' + str(iMath['inBands'].keys()))

    ####If used in calculator mode.
    if len(bandList) == 0:
        ####Fix: dict item views cannot be added with '+' in python3 (raised
        ####TypeError); use the set-union operator, exactly as the evaluation
        ####pass below already does.
        dataDict = dict(IML.fnDict.items() | IML.constDict.items())
        logger.info('Calculator mode. No output files created')
        for ii, equation in enumerate(iMath['equations']):
            ####Fix: evaluate the current 'equation', not the stale 'expr'
            ####left over from the parsing loop above (which always held the
            ####LAST expression, so earlier bands were evaluated wrongly).
            res = eval(equation, dataDict)
            logger.info('Output Band %d : %f '%(ii, res))

        sys.exit(0)
    else:
        if args.out is None:
            raise IOError('Output file has not been defined.')

    #####Check if all bands in bandList have been accounted for
    for band in bandList:
        if band not in iMath['inBands'].keys():
            raise ValueError('Undefined band : %s '%(band))

    ######Check if all the widths match
    widths = [img.width for var,img in iMath['inFiles'].items() ]
    if len(widths) != widths.count(widths[0]):
        logger.debug('Widths of images: ' +
                str([(var, img.name, img.width) for var,img in iMath['inFiles'].items()]))
        raise IOError('Input images are not of same width')

    iMath['width'] = widths[0]
    logger.debug('Output Width = %d'%(iMath['width']))

    #######Check if all the lengths match
    lengths=[img.length for var,img in iMath['inFiles'].items()]
    if len(lengths) != lengths.count(lengths[0]):
        logger.debug('Lengths of images: ' +
                str([(var, img.name, img.length) for var,img in iMath['inFiles'].items()]))
        raise IOError('Input images are not of the same length')

    iMath['length'] = lengths[0]
    logger.debug('Output Length = %d'%(iMath['length']))

    #####Now create the output file
    outmap = IML.memmap(args.out, mode='write', nchannels=numOutBands,
            nxx=iMath['width'], nyy=iMath['length'], scheme=iMath['outScheme'],
            dataType=iMath['outType'])

    logger.debug('Creating output ISCE mmap with \n' +
            'file = %s \n'%(args.out) +
            'bands = %d \n'%(numOutBands) +
            'width = %d \n'%(iMath['width']) +
            'length = %d \n'%(iMath['length'])+
            'scheme = %s \n'%(iMath['outScheme']) +
            'dtype = %s \n'%(iMath['outType']))

    iMath['outBands'] = outmap.bands

    #####Start evaluating the expressions

    ####Set up the name space to use
    dataDict = dict(IML.fnDict.items() | IML.constDict.items())
    bands = iMath['inBands']
    outBands = iMath['outBands']

    ####Array representing columns
    dataDict['COL'] = np.arange(iMath['width'], dtype=np.float32)

    ####Line-by-line evaluation keeps the memory footprint bounded.
    for lineno in range(int(iMath['length'])):

        ####Setting row number
        dataDict['ROW'] = lineno*1.0

        ####Load one line from each of the bands
        for band in bandList:
            dataDict[band] = bands[band][lineno,:]

        ####For each output band
        for kk,expr in enumerate(iMath['equations']):
            res = eval(expr, dataDict)
            outBands[kk][lineno,:] = res

    ######Determine common bbox if any
    outputBbox = mergeBbox(iMath['bboxes'])

    ######Render ISCE XML if needed
    if not args.noxml:
        IML.renderISCEXML(args.out, numOutBands,
                iMath['length'], iMath['width'],
                iMath['outType'], iMath['outScheme'],
                bbox = outputBbox,
                descr = ' '.join(sys.argv))
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Parse the basic options (equation, output name/format) first; the
    # leftover tokens hold the per-variable file definitions (--a, --b, ...).
    args, files = firstPassCommandLine()
    # NOTE(review): these two prints look like leftover debugging output —
    # consider removing them.
    print('args: ', args)
    print('files: ', files)
    main(args, files)
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,116 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import numpy as np
|
||||
import os
|
||||
import argparse
|
||||
import tempfile
|
||||
|
||||
try:
|
||||
from osgeo import gdal
|
||||
gdal.UseExceptions()
|
||||
except ImportError:
|
||||
raise Exception('gdal python bindings are needed for this script to work.')
|
||||
|
||||
def cmdLineParse():
    '''
    Command line parser.

    Returns the parsed argparse.Namespace for the geotiff-generation script.
    '''
    ap = argparse.ArgumentParser(description='Generate graphics from ISCE products using gdal')

    # (flag, keyword arguments) for every supported option.
    option_specs = [
        ('-i', dict(dest='infile', type=str, required=True,
                    help='Input ISCE product file')),
        ('-o', dict(dest='outfile', type=str, required=True,
                    help='Output GEOTIFF file')),
        ('-b', dict(dest='band', type=int, default=0,
                    help='Band number to use if input image is multiband. Default: 0')),
        ('-c', dict(dest='clim', type=float, nargs=2, required=True,
                    help='Color limits for the graphics')),
        ('-m', dict(dest='cmap', type=str, default='jet',
                    help='Matplotlib colormap to use')),
        ('-t', dict(dest='table', type=str, default=None,
                    help='Color table to use')),
        ('-n', dict(dest='ncolors', type=int, default=64,
                    help='Number of colors')),
    ]
    for flag, kwargs in option_specs:
        ap.add_argument(flag, **kwargs)

    return ap.parse_args()
|
||||
|
||||
|
||||
def get_cmap(mapname, N, clim):
    '''
    Write a GDAL color table (.cpt) file sampled from a matplotlib colormap.

    Parameters
    ----------
    mapname : str
        Name of the matplotlib colormap (e.g. 'jet').
    N : int
        Number of colors to sample between the limits.
    clim : sequence of two floats
        (min, max) data values spanned by the color mapping.

    Returns
    -------
    str
        Name of the color table written: '<mapname>.cpt'.
    '''
    try:
        import matplotlib.pyplot as plt
        import matplotlib.colors as colors
        import matplotlib.cm as cmx
    except ImportError:
        raise Exception('Matplotlib is needed if user-defined color table is not provided.')

    cmap = plt.get_cmap(mapname)
    cNorm = colors.Normalize(vmin=clim[0], vmax=clim[1])
    scalarMap = cmx.ScalarMappable(norm=cNorm, cmap=cmap)

    # Fixes: the original referenced the global 'inps' instead of the 'clim'
    # argument (NameError when called from any other code), and never used
    # 'N' — np.linspace silently fell back to its default of 50 samples.
    vals = np.linspace(clim[0], clim[1], num=N, endpoint=True)

    outname = mapname + '.cpt'

    with open(outname, 'w') as fid:
        for val in vals:
            cval = scalarMap.to_rgba(val)
            fid.write('{0} {1} {2} {3} \n'.format(val, int(cval[0]*255), int(cval[1]*255), int(cval[2]*255)))

        # 'nv' entry: color assigned to nodata values (fully transparent black).
        fid.write('nv 0 0 0 0 \n')

    return outname
|
||||
|
||||
if __name__ == '__main__':
    '''
    Main driver.
    '''

    #Parse command line
    inps = cmdLineParse()

    ####Convert to a gdal format if not already done
    try:
        # If GDAL can already open the product, no conversion is needed.
        ds = gdal.Open(inps.infile)
        ds = None
    except:
        # NOTE(review): bare except — ANY failure (not only "unknown format")
        # triggers the ENVI-conversion fallback below.
        cmd = 'isce2gis.py envi -i {0}'.format(inps.infile)
        flag = os.system(cmd)

        if flag:
            raise Exception('Failed: {0}'.format(cmd))

    ####Set up the color table
    if inps.table is None: ####No custom color map has been provided
        # Sample a matplotlib colormap into a .cpt file for gdaldem.
        cmap = get_cmap(inps.cmap, inps.ncolors, inps.clim)
        plt_cmap = True
    else:
        cmap = inps.table
        plt_cmap = False

    #####Build VRT
    vrtname = inps.outfile+'.vrt'
    if os.path.exists(vrtname):
        print('VRT file already exists. Cleaning it ....')
        os.remove(vrtname)

    # gdaldem band numbers are 1-indexed, hence band+1.
    # NOTE(review): paths are interpolated unquoted into a shell command —
    # filenames with spaces or shell metacharacters will break / are unsafe.
    cmd = 'gdaldem color-relief {0} {1} {2} -alpha -b {3} -of VRT'.format(inps.infile, cmap, vrtname, inps.band+1)

    flag = os.system(cmd)
    if flag:
        raise Exception('Failed: %s'%(cmd))

    ###Build geotiff
    cmd = 'gdal_translate {0} {1}'.format(vrtname, inps.outfile)

    flag = os.system(cmd)

    if flag:
        raise Exception('Failed: %s'%(cmd))
|
|
@ -0,0 +1,151 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
import isce
|
||||
import isceobj
|
||||
import argparse
|
||||
import os
|
||||
import xml.etree.ElementTree as ET
|
||||
from imageMath import IML
|
||||
|
||||
def cmdLineParse():
    '''
    Command line parser.

    Two sub-commands are supported: 'vrt' (optionally with lat/lon grids)
    and 'envi'. Returns the parsed argparse.Namespace.
    '''
    top = argparse.ArgumentParser(description='Export ISCE products directly to ENVI / VRT formats')

    fmt_parsers = top.add_subparsers(help='Output format options', dest='fmt')

    # 'vrt' sub-command: product plus optional geolocation grids.
    vrt_sub = fmt_parsers.add_parser('vrt', help='Export with VRT file')
    vrt_sub.add_argument('-i', '--input', dest='infile', type=str, required=True,
                         help='ISCE product file to export')
    vrt_sub.add_argument('--lat', dest='latvrt', type=str, default=None,
                         help='Location of the latitude file')
    vrt_sub.add_argument('--lon', dest='lonvrt', type=str, default=None,
                         help='Location of the longitude file')

    # 'envi' sub-command: product only.
    envi_sub = fmt_parsers.add_parser('envi', help='Export with ENVI hdr file')
    envi_sub.add_argument('-i', '--input', dest='infile', type=str, required=True,
                          help='ISCE product file to export')

    return top.parse_args()
|
||||
|
||||
|
||||
def isce2envi(inname):
    '''
    Create an ENVI .hdr for the given ISCE product.
    '''
    # loadImage returns (image object, data filename, metadata filename);
    # only the image object is needed here.
    image, _, _ = IML.loadImage(inname)
    image.renderEnviHDR()
    return
|
||||
|
||||
|
||||
def isce2vrt(inname):
    '''
    Create a GDAL VRT for the given ISCE product.
    '''
    # Only the loaded image object is needed; file names are discarded.
    image, _, _ = IML.loadImage(inname)
    image.renderVRT()
    return
|
||||
|
||||
|
||||
def getVRTinfo(inname):
    '''
    Verify that the VRT for *inname* describes a single-band image and
    return its dimensions.

    Parameters
    ----------
    inname : str
        Product file name; '<inname>.vrt' is parsed.

    Returns
    -------
    tuple of (int, int)
        (width, length) of the raster.

    Raises
    ------
    Exception
        If the VRT contains more than one (or zero) raster bands.
    '''
    tree = ET.parse(inname.strip() + '.vrt')
    root = tree.getroot()

    width = int(root.attrib['rasterXSize'])
    length = int(root.attrib['rasterYSize'])

    # Fix: root.find() returns only the FIRST matching element, and len() of
    # an Element counts its *children* — so the original never counted bands
    # (a normal one-band VRT yielded 0 and always raised). findall() returns
    # every VRTRasterBand; its length is the band count.
    bands = len(root.findall('VRTRasterBand'))

    if bands != 1:
        raise Exception('%s is not a one band image'%(inname+'.vrt'))

    return (width, length)
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
    '''
    Main driver.
    '''

    inps = cmdLineParse()

    if inps.fmt == 'envi':
        isce2envi(inps.infile)

    elif inps.fmt == 'vrt':

        if (inps.latvrt is None) or (inps.lonvrt is None):
            #No geolocation grids supplied: plain VRT for the product only.
            isce2vrt(inps.infile)

        else:
            # latf = inps.latvrt + '.vrt'
            # if not os.path.exists(latf):
            isce2vrt(inps.latvrt)

            # lonf = inps.lonvrt + '.vrt'
            # if not os.path.exists(lonf):
            isce2vrt(inps.lonvrt)

            #Lat / lon grids must be single band and match the product size.
            latimg, dummy, dummy = IML.loadImage(inps.latvrt)
            latwid = latimg.getWidth()
            latlgt = latimg.getLength()
            if latimg.getBands() != 1:
                raise Exception('Latitude image should be single band')

            lonimg, dummy, dummy = IML.loadImage(inps.lonvrt)
            lonwid = lonimg.getWidth()
            lonlgt = lonimg.getLength()

            if lonimg.getBands() != 1:
                raise Exception('Longitude image should be single band')

            img = isceobj.createImage()
            img.load(inps.infile + '.xml')
            wid = img.getWidth()
            lgt = img.getLength()

            if any([(latwid - wid) != 0, (lonwid - wid) != 0]):
                raise Exception('Widths of image, lat and lon files dont match')

            if any([(latlgt - lgt) != 0, (lonlgt - lgt) != 0]):
                raise Exception('Lengths of image, lat and lon files dont match')

            ####Create prelim XML
            isce2vrt(inps.infile)
            tree = ET.parse(inps.infile + '.vrt')
            root = tree.getroot()

            #Attach a GDAL GEOLOCATION metadata domain pointing at the grids.
            meta = ET.SubElement(root, 'metadata')
            meta.attrib['domain'] = "GEOLOCATION"
            meta.tail = '\n'
            meta.text = '\n '

            #Dataset paths are relative to the product's directory; bands,
            #offsets and steps follow GDAL's GEOLOCATION conventions.
            rdict = { 'Y_DATASET' : os.path.relpath(inps.latvrt + '.vrt', os.path.dirname(inps.infile)),
                    'X_DATASET' : os.path.relpath(inps.lonvrt + '.vrt', os.path.dirname(inps.infile)),
                    'X_BAND' : "1",
                    'Y_BAND' : "1",
                    'PIXEL_OFFSET': "0",
                    'LINE_OFFSET' : "0",
                    'LINE_STEP' : "1",
                    'PIXEL_STEP' : "1" }

            for key, val in rdict.items():
                data = ET.SubElement(meta, 'mdi')
                data.text = val
                data.attrib['key'] = key
                data.tail = '\n '

            #Last entry closes the metadata block with a bare newline.
            data.tail = '\n'
            tree.write(inps.infile + '.vrt')
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,559 @@
|
|||
#!/usr/bin/env python3
|
||||
#Author:Giangi Sacco
|
||||
#Copyright 2009-2014, by the California Institute of Technology.
|
||||
import isce
|
||||
import os
|
||||
import sys
|
||||
import json
|
||||
import argparse
|
||||
import collections
|
||||
import importlib
|
||||
from iscesys.DictUtils.DictUtils import DictUtils as DU
|
||||
|
||||
class Helper(object):
|
||||
|
||||
def getRegistered(self):
|
||||
#Register all the factory that want to provide help
|
||||
#Each .hlp file has a json structure like
|
||||
'''
|
||||
{TypeName
|
||||
{'args':
|
||||
{
|
||||
#positional arguments have as key the position in str format
|
||||
#since json only allows keys to be string
|
||||
'0':{'value':values,'type':type},
|
||||
'1':{'value':values,'type':type}
|
||||
#keyword arguments have the name of the argument as key
|
||||
argname:{'value':values,'type':type,'optional':bool,'default':default}
|
||||
},
|
||||
'factory':factory,
|
||||
'package':package,
|
||||
}
|
||||
}
|
||||
'''
|
||||
registered = {}
|
||||
helplist = os.listdir(self._helpDir)
|
||||
for name in helplist:
|
||||
fullname = os.path.join(self._helpDir,name)
|
||||
if not name.endswith('.hlp'):
|
||||
continue
|
||||
with open(fullname) as fp:
|
||||
registered.update(json.load(fp))
|
||||
|
||||
return collections.OrderedDict(sorted(registered.items()))
|
||||
|
||||
def getTypeFromFactory(self,factory):
|
||||
instanceType = 'N/A'
|
||||
for k,v in self._registered.items():
|
||||
if v['factory'] == factory:
|
||||
instanceType = k
|
||||
break
|
||||
return instanceType
|
||||
|
||||
def getInstance(self,typeobj):
|
||||
obj2help = self._registered[typeobj]
|
||||
args,kwargs = self.getPosAndKwArgs(obj2help)
|
||||
factory = getattr(importlib.import_module(obj2help['package']),obj2help['factory'])
|
||||
return factory(*args,**kwargs)
|
||||
|
||||
def convert(self,value,type_):
|
||||
|
||||
try:
|
||||
module = importlib.import_module('builtins')
|
||||
ret = getattr(module,type_)(value)
|
||||
except:
|
||||
print("Cannot convert",value,"to a type",type_)
|
||||
raise Exception
|
||||
return ret
|
||||
|
||||
    def askHelp(self, instance, steps=False):
        """Configure *instance* just enough to introspect it, then print its help."""
        #since it can be called externally, make sure that we remove the
        #arguments that are not understood by the isce Parser
        try:
            sys.argv = [sys.argv[0]]
            instance._parameters()
            instance.initProperties({})
            instance._init()
            instance._facilities()
            instance._dictionaryOfFacilities = DU.renormalizeKeys(instance._dictionaryOfFacilities)
            self.helper(instance, steps)
        except Exception as e:
            # NOTE(review): the caught exception 'e' is never reported, which
            # can hide real configuration errors behind this generic message.
            print("No help available.")
|
||||
    def getPosAndKwArgs(self, obj):
        """
        Convert self._inputs.args into (positional, keyword) argument tuples.

        Each token is first tried as a positional argument (its index is the
        key into obj['args']); if that fails it is re-tried as 'name=value'.
        Tokens matching neither form re-raise after printing the error.
        """
        args = []
        kwargs = {}
        if self._inputs.args:#otherwise no args present
            for arg,i in zip(self._inputs.args,range(len(self._inputs.args))):
                try:
                    #positional argument
                    args.append(self.convert(arg,obj['args'][str(i)]['type']))
                except Exception as e:
                    try:
                        #keyword argument of the form name=value
                        kw,val = arg.split("=")
                        kwargs[kw] = self.convert(val,obj['args'][kw]['type'])
                    except Exception as e:
                        print(e)
                        raise

        return (args,kwargs)
|
||||
|
||||
    def step_help(self, instance):
        """Print the steps-processing help for *instance*, then exit(0)."""
        instance.help_steps()
        instance._add_methods()
        instance._steps()
        print()
        print("Command line options for steps processing are formed by")
        print("combining the following three options as required:\n")
        print("'--start=<step>', '--end=<step>', '--dostep=<step>'\n")
        print("The step names are chosen from the following list:")
        print()
        # Print the step names npl to a row.
        npl = 5
        # NOTE(review): the number of full rows is computed from
        # step_list_help, but the rows printed come from step_list — confirm
        # the two lists always have the same length.
        nfl = int(len(instance.step_list_help)/npl)
        for i in range(nfl):
            print(instance.step_list[i*npl:(i+1)*npl])
        if len(instance.step_list) % npl:
            print(instance.step_list[nfl*npl:])
        print()
        print("If --start is missing, then processing starts at the "+
              "first step.")
        print("If --end is missing, then processing ends at the final "+
              "step.")
        print("If --dostep is used, then only the named step is "+
              "processed.")
        print()
        print("In order to use either --start or --dostep, it is "+
              "necessary that a")
        print("previous run was done using one of the steps options "+
              "to process at least")
        print("through the step immediately preceding the starting "+
              "step of the current run.")
        print()
        sys.exit(0)
|
||||
|
||||
|
||||
    def helper(self,instance,steps=False):
        """
        Print the full help for a configured *instance*: its own help text,
        the table of configurable parameters, and the table of facilities.
        Always terminates the process via sys.exit.
        """
        #if facility is None we print the top level so the recursion ends right away
        #if facility is defined (not None) and is not part of the facilities
        # then keep going down the tree structure

        instance.help()
        print()
        try:
            try:
                #only applications have it
                instance.Usage()
            except Exception:
                pass
            print()
            if steps:
                # step_help() itself calls sys.exit(0); this exit is a backstop.
                self.step_help(instance)
                sys.exit(0)
        except Exception as x:
            sys.exit(0)
        finally:
            pass

        #sometime there is no help available. Postpone the printing until
        #there is something to print for sure
        fullMessage = ""
        fullMessage = "\nSee the table of configurable parameters listed \n"
        fullMessage += "below for a list of parameters that may be specified in the\n"
        fullMessage += "input file. See example input xml files in the isce 'examples'\n"
        fullMessage += "directory. Read about the input file in the ISCE.pdf document.\n"

        # Fixed column widths (the dynamic computations are kept for reference).
        # maxname = max(len(n) for n in self.dictionaryOfVariables.keys())
        # maxtype = max(len(str(x[1])) for x in self.dictionaryOfVariables.values())
        # maxman = max(len(str(x[2])) for x in self.dictionaryOfVariables.values())
        # maxdoc = max(len(x) for x in self.descriptionOfVariables.values())
        maxname = 27
        maxtype = 10
        maxman = 10
        maxdoc = 30
        underman = "="*maxman
        undertype = "="*maxtype
        undername = "="*maxname
        underdoc = "="*maxdoc
        spc = " "
        n = 1
        spc0 = spc*n

        fullMessage += "\nThe user configurable inputs are given in the following table.\n"
        fullMessage += "Those inputs that are of type 'component' are also listed in\n"
        fullMessage += "table of facilities below with additional information.\n"
        fullMessage += "To configure the parameters, enter the desired value in the\n"
        fullMessage += "input file using a property tag with name = to the name\n"
        fullMessage += "given in the table.\n"

        line = "name".ljust(maxname,' ')+spc0+"type".ljust(maxtype,' ')
        line += spc0+"mandatory".ljust(maxman,' ')+spc0+"doc".ljust(maxdoc,' ')

        fullMessage += line + '\n'

        line = undername+spc0+undertype+spc0+underman+spc0+underdoc

        fullMessage += line + '\n'

        #make sure that there is something to print
        shallPrint = False
        instance.reformatDictionaryOfVariables()
        for x, y in collections.OrderedDict(sorted(instance.dictionaryOfVariables.items())).items():
            #skip the mandatory private. Those are parameters of Facilities that
            #are only used by the framework and the user should not know about
            if y['mandatory'] and y['private']:
                continue
            if x in instance.descriptionOfVariables:
                z = instance.descriptionOfVariables[x]['doc']
            elif x in instance._dictionaryOfFacilities and 'doc' in instance._dictionaryOfFacilities[x]:
                z = instance._dictionaryOfFacilities[x]['doc']
            else:
                z = 'N/A'
            shallPrint = True
            # Extract the bare type name from "<class 'xxx'>" when possible.
            try:
                yt = str(y['type']).split("'")[1]
            except:
                yt = str(y['type'])

            # Wrap every column to its width; pad the shorter columns with
            # blanks so all columns have the same number of rows.
            lines = []
            self.cont_string = ''
            lines.append(self.columnate_words(x, maxname, self.cont_string))
            lines.append(self.columnate_words(yt, maxtype, self.cont_string))
            lines.append(self.columnate_words(str(y['mandatory']), maxman, self.cont_string))
            lines.append(self.columnate_words(z, maxdoc, self.cont_string))
            nlines = max(map(len,lines))
            for row in lines:
                row += [' ']*(nlines-len(row))
            for ll in range(nlines):
                fullMessage += lines[0][ll].ljust(maxname,' ')
                fullMessage += spc0+lines[1][ll].ljust(maxtype,' ')
                fullMessage += spc0+lines[2][ll].ljust(maxman,' ')
                fullMessage += spc0+lines[3][ll].ljust(maxdoc,' ') + '\n'
            # line = spc0+x.ljust(maxname)+spc0+yt.ljust(maxtype)
            # line += spc0+y[2].ljust(maxman)+spc0+z.ljust(maxdoc)
            # print(line)
        if(shallPrint):
            print(fullMessage)
        else:
            print("No help available\n")
        #only print the following if there are facilities
        if(instance._dictionaryOfFacilities.keys()):
            #maxname = max(len(n) for n in self._dictionaryOfFacilities.keys())
            maxname = 20
            undername = "="*maxname

            # maxmod = max(
            #     len(x['factorymodule']) for x in
            #     self._dictionaryOfFacilities.values()
            #     )
            maxmod = 15
            undermod = "="*maxmod

            # maxfac = max(
            #     len(x['factoryname']) for x in
            #     self._dictionaryOfFacilities.values()
            #     )
            maxfac = 17
            underfac = "="*maxfac

            # maxarg = max(
            #     len(str(x['args'])) for x in self._dictionaryOfFacilities.values()
            #     )
            maxarg = 20
            underarg = "="*maxarg

            # maxkwa = max(
            #     len(str(x['kwargs'])) for x in
            #     self._dictionaryOfFacilities.values()
            #     )
            maxkwa = 7
            # underkwa = "="*max(maxkwa, 6)
            underkwa = "="*maxkwa
            spc = " "
            n = 1
            spc0 = spc*n
            firstTime = True
            for x, y in collections.OrderedDict(sorted(instance._dictionaryOfFacilities.items())).items():
                #skip the mandatory private. Those are parameters of Facilities that
                #are only used by the framework and the user should not know about
                if y['mandatory'] and y['private']:
                    continue
                #only print if there is something
                if firstTime:
                    # Header for the facilities table, emitted once before the
                    # first printable row.
                    firstTime = False
                    print()
                    print("The configurable facilities are given in the following table.")
                    print("Enter the component parameter values for any of these "+
                          "facilities in the")
                    print("input file using a component tag with name = to "+
                          "the name given in")
                    print("the table. The configurable parameters for a facility "+
                          "are entered with ")
                    print("property tags inside the component tag. Examples of the "+
                          "configurable")
                    print("parameters are available in the examples/inputs directory.")
                    print("For more help on a given facility run")
                    print("iscehelp.py -t type")
                    print("where type (if available) is the second entry in the table")
                    print()

                    line = "name".ljust(maxname)+spc0+"type".ljust(maxmod)

                    print(line)
                    line = " ".ljust(maxname)+spc0+" ".ljust(maxmod)

                    print(line)
                    line = undername+spc0+undermod
                    print(line)

                lines = []
                self.cont_string = ''
                lines.append(self.columnate_words(x, maxname, self.cont_string))
                z = self.columnate_words(self.getTypeFromFactory(y['factoryname']),maxmod, self.cont_string)
                lines.append(z)

                nlines = max(map(len,lines))
                for row in lines:
                    row += [' ']*(nlines-len(row))
                for ll in range(nlines):
                    out = lines[0][ll].ljust(maxname)
                    out += spc0+lines[1][ll].ljust(maxmod)
                    print(out)

                # line = spc0+x.ljust(maxname)+spc0+y['factorymodule'].ljust(maxmod)
                # line += spc0+y['factoryname'].ljust(maxfac)
                # line += spc0+str(y['args']).ljust(maxarg)
                # line += spc0+str(y['kwargs']).ljust(maxkwa)
                # print(line)

        # NOTE(review): exits with status 1 even on success — confirm intended.
        return sys.exit(1)
|
||||
def columnate_words(self, s, n, cont='', onePerLine=False):
    """
    Wrap sentence *s* into columns of width <= *n*.

    Returns a list of strings.  Words longer than *n* are broken up by
    self.nsplit with the continuation string *cont* appended to each
    piece.  With onePerLine=True each word is placed on its own row.
    An empty sentence yields [''].
    """
    words = s.split()
    if not words:
        return ['']

    # Seed the row list with the first word, splitting it if it is
    # already wider than the column.
    first = words[0]
    if len(first) > n:
        rows = list(self.nsplit(first + " ", n, cont))
    else:
        rows = [first]
    width = len(rows[-1])

    for word in words[1:]:
        width += len(word) + 1          # +1 for the separating space
        if width <= n:
            if onePerLine:
                rows.append(word)
            else:
                rows[-1] += " " + word
        else:
            # Word does not fit on the current row: split/append it.
            rows.extend(self.nsplit(word, n, cont))
            # onePerLine forces the next word onto a fresh row by
            # making the running width immediately overflow.
            width = (n + 1) if onePerLine else len(rows[-1])
    return rows
|
||||
|
||||
def nsplit(self, s, nc, cont=''):
    """
    Split string *s* into pieces of display width *nc*.

    Each full piece carries the continuation marker *cont* at its end,
    so the payload per piece is nc - len(cont) characters; any
    remainder becomes a final piece without the marker.

    Fix: use integer floor division (//) instead of int(ns/n).  The
    original true-division form goes through a float and can produce a
    wrong chunk count for very long strings (floats lose integer
    precision above 2**53); // is exact and idiomatic.

    NOTE(review): nc <= len(cont) would make the payload width <= 0 and
    raise ZeroDivisionError/loop forever upstream — callers currently
    never do this; confirm before hardening.
    """
    n = nc - len(cont)
    ns = len(s)
    # Full-width chunks, each terminated by the continuation marker.
    pieces = [s[i * n:(i + 1) * n] + cont for i in range(ns // n)]
    if ns % n:
        # Trailing partial chunk (no continuation marker).
        pieces.append(s[(ns // n) * n:])
    return pieces
|
||||
|
||||
def typeNeedsNoArgs(self, type_):
    """
    Return True when the factory for *type_* can be called without
    arguments.

    An argument is considered required when its key is a digit string
    (positional) or when its spec does not mark it optional.  Any
    lookup failure (unknown type, malformed registry entry) is treated
    as "no arguments needed", matching the original behavior.
    """
    try:
        for key, spec in self._registered[type_]['args'].items():
            if key.isdigit():
                # Positional argument: always required.
                return False
            if not ('optional' in spec and spec['optional']):
                # Keyword argument not explicitly optional: required.
                return False
    except Exception:
        # Unknown type or malformed entry: assume no args needed.
        return True
    return True
|
||||
|
||||
def printInfo(self,type_,helpIfNoArg = False, steps=False):
    """Print a table describing the factory arguments of *type_*.

    For each argument registered for the type, one table row shows the
    name, type, argtype (positional/keyword), whether it is mandatory,
    its allowed/current values, and its default.  If the type takes no
    arguments (or any lookup fails) a one-line message is printed
    instead.  When helpIfNoArg is True an instance is created and its
    own help is shown.

    NOTE(review): the *steps* parameter is accepted but never used in
    this body — askHelp below reads self._inputs.steps instead; confirm
    whether that is intended.
    """
    #try to print the info of the arguments necessary to instanciate the instance
    try:
        # Sort arguments by name for a stable table ordering.
        sortedArgs = collections.OrderedDict(sorted(self._registered[type_]['args'].items()))
        # Fixed column widths for the table, and matching '=' underlines.
        maxname = 17
        undername = "="*maxname
        maxtype = 10
        undertype = "="*maxtype
        maxargtype = 10
        underargtype = "="*maxargtype
        maxman = 10
        underman = "="*maxman
        maxvals = 20
        undervals = "="*maxvals
        maxdef = 10
        underdef = "="*maxdef
        # Column separator: n copies of a single space.
        spc = " "
        n = 1
        spc0 = spc*n
        # Header row.
        line = "name".ljust(maxname,' ')+spc0+"type".ljust(maxtype,' ')+spc0+"argtype".ljust(maxargtype,' ')
        line += spc0+"mandatory".ljust(maxman,' ')+spc0+"values".ljust(maxvals,' ')+spc0+"default".ljust(maxdef,' ')

        fullMessage = line + '\n'

        # Underline row.
        line = undername+spc0+undertype+spc0+underargtype+spc0+underman+spc0+undervals+spc0+underdef
        shallPrint = False
        fullMessage += line + '\n'
        for arg,val in sortedArgs.items():
            # NOTE: 'type' shadows the builtin for the rest of this loop.
            try:
                type = str(val['type'])
            except Exception:
                type = 'N/A'
            # Digit keys denote positional arguments.
            if(arg.isdigit()):
                argtype = 'positional'
            else:
                argtype = 'keyword'
            # Missing 'optional' key means the argument is mandatory.
            try:
                mandatory = 'False' if val['optional'] else 'True'
            except Exception:
                mandatory = 'True'
            try:
                default = str(val['default'])
            except Exception:
                default = 'Not set'

            # Render the current/allowed value(s) as a single string.
            if isinstance(val['value'],list):
                posarg = ' '.join(val['value'])
            elif isinstance(val['value'],str) and val['value']:
                posarg = val['value']
            else:
                posarg = ''

            # Wrap each cell into column-width rows; a cell may span
            # several physical lines.
            lines = []
            self.cont_string = ''
            lines.append(self.columnate_words(arg, maxname, self.cont_string))
            lines.append(self.columnate_words(type, maxtype, self.cont_string))
            lines.append(self.columnate_words(argtype, maxargtype, self.cont_string))
            lines.append(self.columnate_words(mandatory, maxman, self.cont_string))
            lines.append(self.columnate_words(posarg, maxvals, self.cont_string,True))
            lines.append(self.columnate_words(default, maxdef, self.cont_string))

            # Pad every cell to the tallest cell so rows align.
            nlines = max(map(len,lines))
            for row in lines:
                try:
                    row += [' ']*(nlines-len(row))
                except:
                    dummy = 1
            # Emit the aligned physical lines for this argument.
            for ll in range(nlines):
                fullMessage += lines[0][ll].ljust(maxname,' ')
                fullMessage += spc0+lines[1][ll].ljust(maxtype,' ')
                fullMessage += spc0+lines[2][ll].ljust(maxargtype,' ')
                fullMessage += spc0+lines[3][ll].ljust(maxman,' ')
                fullMessage += spc0+lines[4][ll].ljust(maxvals,' ')
                fullMessage += spc0+lines[5][ll].ljust(maxdef,' ') + '\n'
            shallPrint = True
        # line = spc0+x.ljust(maxname)+spc0+yt.ljust(maxtype)
        # line += spc0+y[2].ljust(maxman)+spc0+z.ljust(maxdoc)
        # print(line)
        if(shallPrint):
            print("\nType ",type_, ": Constructor requires arguments described in the\n" +
                "table below. Use the -a option with the mandatory arguments\n"+
                "to ask for more help. Run iscehelp.py -h for more info on the -a option.\n",sep="")

            print(fullMessage)
    except Exception:
        # Any failure above is treated as "no arguments required".
        print("\nType ",type_, ": constructor requires no arguments",sep="")

    #try to see if one can create an instance and provide more help
    if helpIfNoArg:
        instance = self.getInstance(type_)
        self.askHelp(instance, self._inputs.steps)
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
def printAll(self):
    """Print the argument table for every registered object type."""
    for registered_type in self._registered:
        self.printInfo(registered_type)
|
||||
|
||||
|
||||
def run(self):
    """Dispatch on the parsed command-line options.

    - no options, or -i       : list every registered type.
    - -t TYPE (no -a)         : print the argument table for TYPE;
                                auto-instantiate for more help when the
                                factory needs no arguments.
    - -t TYPE -a ARGS...      : instantiate TYPE with ARGS and show its
                                full (optionally step-wise) help.

    Fix: the original had two additional elif branches testing
    self._inputs.steps, but their conditions were strict subsets of the
    two branches above them and could never be reached — so -s was
    silently ignored when combined with -t alone.  The dead branches
    are removed and the steps flag is threaded through printInfo.
    """
    self.parse()
    # argparse has consumed the options; reset argv so components
    # instantiated later do not try to re-parse them.
    sys.argv = [sys.argv[0]]

    # True when every parsed option is falsy (nothing was given).
    noArgs = all(not v for _, v in self._inputs._get_kwargs())

    if self._inputs.info or noArgs:
        # No options (or -i): list all available helps.
        self.printAll()
    elif self._inputs.type and not self._inputs.args:
        # Only -t: show the argument table; honor -s (previously lost
        # in an unreachable branch).
        self.printInfo(self._inputs.type,
                       helpIfNoArg=self.typeNeedsNoArgs(self._inputs.type),
                       steps=self._inputs.steps)
    elif self._inputs.type and self._inputs.args:
        # -t with -a: build the instance and ask it for help.
        if self._inputs.type in self._registered:
            instance = self.getInstance(self._inputs.type)
            self.askHelp(instance, self._inputs.steps)
        else:
            print("Help for",self._inputs.type,"is not available. Run iscehelp.py"+
                  " with no options to see the list of available type of objects" +
                  " one can get help for")
            sys.exit(1)
|
||||
|
||||
|
||||
|
||||
def parse(self):
    """Build the iscehelp.py option parser and store the parsed
    options on self._inputs."""
    epilog = ('Run iscehelp.py with no arguments or with -i option to list the available object\n'
              'types for which help is provided\n')
    parser = argparse.ArgumentParser(
        formatter_class=argparse.RawDescriptionHelpFormatter, epilog=epilog)
    parser.add_argument('-i', '--info', dest='info', action='store_true',
                        help='Provides the list of registered object types')
    parser.add_argument('-t', '--type', dest='type', type=str,
                        help='Specifies the object type for which help is sought')
    parser.add_argument('-a', '--args', dest='args', type=str, nargs='+',
                        help='Set of positional and keyword arguments '
                             'that the factory of the object "type" takes.'
                             'The keyword arguments are specified as keyword=value with no spaces.')
    parser.add_argument('-s', '--steps', dest='steps', action='store_true',
                        help='Provides the list of steps in the help message')

    self._inputs = parser.parse_args()
|
||||
def __init__(self):
    """Locate the on-disk help directory and load the registry of
    types that help is available for."""
    import isce
    #the directory is defined in SConstruct
    # Help files live under <isce package dir>/helper.
    self._helpDir = os.path.join(isce.__path__[0],'helper')
    # Mapping: type name -> factory/argument metadata.
    self._registered = self.getRegistered()
    # Parsed command-line options; populated by parse().
    self._inputs = None
|
||||
|
||||
def main():
    """Entry point: build the Helper and run it."""
    Helper().run()

if __name__ == '__main__':
    main()
|
||||
|
|
@ -0,0 +1,126 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Piyush Agram
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
import os
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import isce
|
||||
import argparse
|
||||
from isceobj.Image import createImage,createDemImage
|
||||
from mroipac.looks.Looks import Looks
|
||||
|
||||
class customArgparseFormatter(argparse.ArgumentDefaultsHelpFormatter, argparse.RawDescriptionHelpFormatter):
    '''
    Help formatter that shows argument defaults while preserving the
    raw (pre-formatted) description and epilog text.
    '''
    pass

def cmdLineParse():
    '''
    Command Line Parser.

    Returns the parsed argparse namespace; exits with status 0 when
    one look is requested in each direction (nothing to do).

    Fix: repaired garbled help strings ("ISCEproduct" -> "ISCE
    product", "ISCE DEproduct" -> "ISCE product").
    '''
    parser = argparse.ArgumentParser(description='Take integer number of looks.',
            formatter_class=customArgparseFormatter,
            epilog = '''

Example:

looks.py -i input.file -o output.file -r 4 -a 4

''')
    parser.add_argument('-i','--input', type=str, required=True, help='Input ISCE product with a corresponding .xml file.', dest='infile')
    parser.add_argument('-o','--output',type=str, default=None, help='Output ISCE product with a corresponding .xml file.', dest='outfile')
    parser.add_argument('-r', '--range', type=int, default=1, help='Number of range looks. Default: 1', dest='rglooks')
    parser.add_argument('-a', '--azimuth', type=int, default=1, help='Number of azimuth looks. Default: 1', dest='azlooks')

    values = parser.parse_args()
    # One look in each direction is a no-op: exit cleanly.
    if (values.rglooks == 1) and (values.azlooks == 1):
        print('Nothing to do. One look requested in each direction. Exiting ...')
        sys.exit(0)

    return values
|
||||
|
||||
def main(inps):
    '''
    The main driver.

    Multilooks the ISCE product named by inps.infile by inps.azlooks x
    inps.rglooks and writes the result (deriving an output name when
    inps.outfile is None).  Returns the output filename.
    '''
    # Accept either the data file or its .xml metadata file as input.
    if inps.infile.endswith('.xml'):
        inFileXml = inps.infile
        inFile = os.path.splitext(inps.infile)[0]
    else:
        inFile = inps.infile
        inFileXml = inps.infile + '.xml'

    # Derive the output name: <stem>.<az>alks_<rg>rlks<ext> when not given.
    if inps.outfile is None:
        spl = os.path.splitext(inFile)
        ext = '.{0}alks_{1}rlks'.format(inps.azlooks, inps.rglooks)
        outFile = spl[0] + ext + spl[1]
    elif inps.outfile.endswith('.xml'):
        outFile = os.path.splitext(inps.outfile)[0]
    else:
        outFile = inps.outfile

    print('Output filename : {0}'.format(outFile))
    #hackish, just to know the image type to instantiate the correct type
    #until we put the info about how to generate the instance in the xml
    from iscesys.Parsers.FileParserFactory import createFileParser
    FP = createFileParser('xml')
    # Parse the metadata to decide between a DEM image and a generic image.
    tmpProp, tmpFact, tmpMisc = FP.parse(inFileXml)
    if('image_type' in tmpProp and tmpProp['image_type'] == 'dem'):
        inImage = createDemImage()
    else:
        inImage = createImage()

    inImage.load(inFileXml)
    inImage.filename = inFile

    # Run the multilooking operator.
    lkObj = Looks()
    lkObj.setDownLooks(inps.azlooks)
    lkObj.setAcrossLooks(inps.rglooks)
    lkObj.setInputImage(inImage)
    lkObj.setOutputFilename(outFile)
    lkObj.looks()

    return outFile
|
||||
|
||||
if __name__ == '__main__':
    # Makes the script executable: parse options, then run the driver.
    main(cmdLineParse())
|
||||
|
|
@ -0,0 +1,179 @@
|
|||
|
||||
import sys
|
||||
import os
|
||||
import fnmatch
|
||||
import Tkinter, tkFileDialog
|
||||
import xml.etree.ElementTree as ElementTree
|
||||
|
||||
|
||||
class App(Tkinter.Frame):
    """Tkinter GUI that generates ISSI input XML files from an ALOS
    quad-pol CEOS data directory.

    The user picks a directory; leader (LED*) and image (IMG*) files
    are located, and per-polarity image XMLs plus auxiliary and output
    XMLs are written to the current directory.

    Note: this module uses the Python-2 ``Tkinter``/``tkFileDialog``
    names (see the imports at the top of the file).
    """

    def __init__(self, reference=None):
        """Create the frame, the Tk variables backing the entries, and
        the widgets.

        reference: parent Tk widget (None -> default root).
        """
        Tkinter.Frame.__init__(self, reference)
        # Bug fix: a Tkinter widget exposes its parent as self.master;
        # the instance has no 'reference' attribute, so the original
        # 'self.reference.title(...)' raised AttributeError at startup
        # (apparently a botched global master->reference rename).
        self.master.title('ISSI Input File Generator')

        self.filterList = None              # Spinbox; created in _buildGUI
        self.filterX = Tkinter.IntVar()     # range filter size
        self.filterY = Tkinter.IntVar()     # azimuth filter size
        self.tec = Tkinter.StringVar()      # TEC output filename
        self.fr = Tkinter.StringVar()       # Faraday rotation output filename
        self.phase = Tkinter.StringVar()    # phase correction output filename

        self.grid()
        self._buildGUI()

    def findFiles(self, dir):
        """Find a list of the files needed for Faraday Rotation estimation"""
        filenames = {'leader': None,
                     'image': {}}
        # Look for files that start with IMG
        # note, this will only work with JAXA/ASF style CEOS files
        # ERSDAC file naming structure is not supported
        for root, dirs, files in os.walk(dir):
            for file in files:
                # Find the leader file
                if (fnmatch.fnmatch(file, 'LED*')):
                    leaderFile = os.path.join(root, file)
                    filenames['leader'] = leaderFile
                # Find the image files; characters 4:6 of the name are
                # the polarity code (HH/HV/VH/VV).
                elif (fnmatch.fnmatch(file, 'IMG*')):
                    polarity = file[4:6]
                    imageFile = os.path.join(root, file)
                    filenames['image'][polarity] = imageFile

        return filenames

    def createImageXML(self, files):
        """Create an XML input file from the dictionary of input files"""
        # One <component> file per polarity, named HH.xml, HV.xml, ...
        for polarity in ('HH', 'HV', 'VH', 'VV'):
            output = polarity + '.xml'
            root = ElementTree.Element('component')
            # Leader File
            leaderProperty = ElementTree.SubElement(root, 'property')
            leaderName = ElementTree.SubElement(leaderProperty, 'name')
            leaderValue = ElementTree.SubElement(leaderProperty, 'value')
            leaderName.text = 'LEADERFILE'
            leaderValue.text = files['leader']
            # Image File
            imageProperty = ElementTree.SubElement(root, 'property')
            imageName = ElementTree.SubElement(imageProperty, 'name')
            imageValue = ElementTree.SubElement(imageProperty, 'value')
            imageName.text = 'IMAGEFILE'
            imageValue.text = files['image'][polarity]

            tree = ElementTree.ElementTree(root)
            self.indent(tree.getroot())
            tree.write(output)

    def createAuxilliaryXML(self, output):
        """Create an input file with the default file names"""
        root = ElementTree.Element('component')
        # One property per polarity, pointing at the per-polarity XML
        # and the ALOS sensor factory.
        for polarity in ('HH', 'HV', 'VH', 'VV'):
            filename = polarity + '.xml'

            property = ElementTree.SubElement(root, 'property')
            name = ElementTree.SubElement(property, 'name')
            factoryName = ElementTree.SubElement(property, 'factoryname')
            factoryModule = ElementTree.SubElement(property, 'factorymodule')
            value = ElementTree.SubElement(property, 'value')
            name.text = polarity
            factoryName.text = 'createALOS'
            factoryModule.text = 'isceobj.Sensor'
            value.text = filename

        tree = ElementTree.ElementTree(root)
        self.indent(tree.getroot())
        tree.write(output)

    def createOutputXML(self, output):
        """Create the output xml file"""
        root = ElementTree.Element('component')
        # Snapshot the current GUI entries into name/value properties.
        products = {'FILTER': self.filterList.get(),
                    'FILTER_SIZE_X': str(self.filterX.get()),
                    'FILTER_SIZE_Y': str(self.filterY.get()),
                    'FARADAY_ROTATION': self.fr.get(),
                    'TEC': self.tec.get(),
                    'PHASE': self.phase.get()}
        for key in products:
            property = ElementTree.SubElement(root, 'property')
            name = ElementTree.SubElement(property, 'name')
            value = ElementTree.SubElement(property, 'value')
            name.text = key
            value.text = products[key]

        tree = ElementTree.ElementTree(root)
        self.indent(tree.getroot())
        tree.write(output)

    def indent(self, elem, level=0):
        """Indent an XML ElementTree in place (classic ElementTree
        pretty-print recipe)."""
        i = "\n" + level * "  "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + "  "
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            child = None
            for child in elem:
                self.indent(child, level + 1)
            # The last child's tail closes the parent's indentation.
            if not child.tail or not child.tail.strip():
                child.tail = i
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = i

    def chooseFiles(self):
        """Create a dialog box for the ALOS Quad-pol directory"""
        dir = tkFileDialog.askdirectory(parent=self, title="Choose a directory")
        if (dir):
            files = self.findFiles(dir)
            try:
                self.createImageXML(files)
                self.createAuxilliaryXML('FR.xml')
                self.createOutputXML('output.xml')
                print("XML Files Created")
            except Exception as strerr:
                print(strerr)
                print("No ALOS files found in %s" % (dir))

    def _buildGUI(self):
        """Create widgets and build the GUI"""
        filterLabel = Tkinter.Label(self, text='Choose Filter Type:')
        xSizeLabel = Tkinter.Label(self, text='Range Filter Size')
        ySizeLabel = Tkinter.Label(self, text='Azimuth Filter Size')
        tecLabel = Tkinter.Label(self, text='TEC Output Filename')
        frLabel = Tkinter.Label(self, text='Faraday Rotation Output Filename')
        phaseLabel = Tkinter.Label(self, text='Phase Correction Output Filename')

        self.filterList = Tkinter.Spinbox(self, values=('None', 'Mean', 'Median', 'Gaussian'))
        xSizeEntry = Tkinter.Entry(self, textvariable=self.filterX)
        ySizeEntry = Tkinter.Entry(self, textvariable=self.filterY)
        frEntry = Tkinter.Entry(self, textvariable=self.fr)
        tecEntry = Tkinter.Entry(self, textvariable=self.tec)
        phaseEntry = Tkinter.Entry(self, textvariable=self.phase)
        dirButton = Tkinter.Button(self, text="Choose Data Directory", command=self.chooseFiles)
        quitButton = Tkinter.Button(self, text="Quit", command=self.quit)

        # Two-column layout: labels left, entries right; buttons last.
        filterLabel.grid(row=0, column=0)
        self.filterList.grid(row=0, column=1)
        xSizeLabel.grid(row=1, column=0)
        xSizeEntry.grid(row=1, column=1)
        ySizeLabel.grid(row=2, column=0)
        ySizeEntry.grid(row=2, column=1)
        frLabel.grid(row=3, column=0)
        frEntry.grid(row=3, column=1)
        tecLabel.grid(row=4, column=0)
        tecEntry.grid(row=4, column=1)
        phaseLabel.grid(row=5, column=0)
        phaseEntry.grid(row=5, column=1)
        dirButton.grid(row=6, column=0)
        quitButton.grid(row=6, column=1)
|
||||
|
||||
if __name__ == "__main__":
    # Simple example program for creating input files for ISSI.
    App().mainloop()
|
||||
|
|
@ -0,0 +1,440 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Walter Szeliga
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
import isce
|
||||
from isce import logging
|
||||
from iscesys.Compatibility import Compatibility
|
||||
from iscesys.Component.Component import Component, Port
|
||||
from isceobj.Planet.Ellipsoid import Ellipsoid
|
||||
from isceobj.Doppler.Doppler import Doppler
|
||||
from isceobj.Orbit.Orbit import Orbit
|
||||
#from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU
|
||||
from iscesys import DateTimeUtil as DTU
|
||||
|
||||
from iscesys.Component.Application import Application
|
||||
from isce.applications.insarApp import SENSOR_NAME, DOPPLER_METHOD
|
||||
from isceobj.Scene.Frame import FrameMixin
|
||||
|
||||
from isceobj.Util.decorators import port
|
||||
|
||||
# Facility: the raw-data reader, built by isceobj.Sensor.createSensor
# from the sensor name supplied via the SENSOR_NAME parameter.
SENSOR = Application.Facility('sensor',
    public_name='sensor',
    module='isceobj.Sensor',
    factory='createSensor',
    args=(SENSOR_NAME, ),
    mandatory=True,
    doc="Reference raw data component"
)
# Facility: the Doppler-centroid estimator, built by
# isceobj.Doppler.createDoppler from the DOPPLER_METHOD parameter.
DOPPLER = Application.Facility('doppler',
    public_name='doppler',
    module='isceobj.Doppler',
    factory='createDoppler',
    args=(DOPPLER_METHOD, ),
    mandatory=False,
    doc="Reference Doppler calculation method"
)
|
||||
|
||||
class makeRawApp(Application):
    """Small ISCE Application that wires a sensor and a Doppler
    estimator into a make_raw component and runs it."""

    # Declared parameters/facilities consumed by the Application base.
    parameter_list = (SENSOR_NAME, DOPPLER_METHOD)
    facility_list = (SENSOR, DOPPLER)

    def main(self):
        """Wire the input ports, run make_raw, and print a summary."""
        self.make_raw.wireInputPort(name='doppler', object=self.doppler)
        self.make_raw.wireInputPort(name='sensor', object=self.sensor)
        self.make_raw.make_raw()
        self.printInfo()

    def printInfo(self):
        """Print the extracted frame and the make_raw component state."""
        print(self.make_raw.frame)
        print(self.make_raw)

    def __init__(self):
        """Set up the Application with an empty sensor/doppler and a
        fresh make_raw worker."""
        Application.__init__(self, "makeraw")
        self.sensor = None
        self.doppler = None
        self.make_raw = make_raw()

    def initFromArglist(self, arglist):
        """Initialize sensor and doppler facilities from a factory
        argument list (alternative to command-line init)."""
        self.initFactory(arglist)
        self.sensor = self.getComponent('Sensor')
        self.doppler = self.getComponent('Doppler')
|
||||
|
||||
|
||||
class make_raw(Component, FrameMixin):
|
||||
|
||||
def __init__(self):
    """Initialize component state and register the two input ports
    ('sensor' and 'doppler') that must be wired before make_raw()."""
    self.sensor = None          # raw-data reader (wired via 'sensor' port)
    self.doppler = None         # Doppler estimator (wired via 'doppler' port)
    self.dopplerValues = None   # Doppler object built by make_raw()
    self.frame = None           # Frame extracted from the sensor
    # Derived Values
    self.spacecraftHeight = 0.0
    self.heightDt = 0.0         # dh/dt between scene start and mid
    self.velocity = 0.0         # spacecraft speed at mid-scene
    self.squint = 0.0           # squint angle, degrees
    self.iqImage = None         # raw I/Q image built by createIQImage()
    Component.__init__(self)

    # Register input ports; the port methods validate the wired object.
    sensorPort = Port(name='sensor', method=self.addSensor)
    dopplerPort = Port(name='doppler', method=self.addDoppler)

    self._inputPorts.add(sensorPort)
    self._inputPorts.add(dopplerPort)
    self.logger = logging.getLogger("isce.make_raw")
    return None
|
||||
|
||||
def __getstate__(self):
    """Pickle support: return __dict__ minus the logger, which is not
    picklable (it is recreated in __setstate__)."""
    d = dict(self.__dict__)
    del d['logger']
    return d
|
||||
|
||||
def __setstate__(self, d):
    """Pickle support: restore state and recreate the logger that
    __getstate__ dropped."""
    self.__dict__.update(d)
    self.logger = logging.getLogger("isce.make_raw")
    return None
|
||||
|
||||
@port('extractImage')
def addSensor(self):
    # 'sensor' input-port hook: the @port decorator validates that the
    # wired object provides extractImage().
    # NOTE(review): storage of the wired object as self.sensor is done
    # by isceobj.Util.decorators.port — confirm its exact semantics.
    return None
|
||||
|
||||
@port('calculateDoppler')
def addDoppler(self):
    # 'doppler' input-port hook: the @port decorator validates that the
    # wired object provides calculateDoppler().
    return None
|
||||
|
||||
def getFrame(self):
    # Accessor: the Frame extracted from the sensor by make_raw().
    return self.frame
|
||||
|
||||
def getIQImage(self):
    # Accessor: the raw I/Q image built by createIQImage().
    return self.iqImage
|
||||
|
||||
def getDopplerValues(self):
    # Accessor: the Doppler object built by make_raw().
    return self.dopplerValues
|
||||
|
||||
def getSpacecraftHeight(self):
    # Accessor: platform height computed by calculateHeightDt().
    return self.spacecraftHeight
|
||||
|
||||
def getHeightDT(self):
    # Accessor: height rate-of-change computed by calculateHeightDt().
    return self.heightDt
|
||||
|
||||
def getVelocity(self):
    # Accessor: mid-scene speed computed by calculateVelocity().
    return self.velocity
|
||||
|
||||
def getSquint(self):
    # Accessor: squint angle (degrees) computed by calculateSquint().
    return self.squint
|
||||
|
||||
def calculateHeightDt(self):
    """Compute the platform height and its rate of change between
    scene start and scene mid, storing them on self.spacecraftHeight
    and self.heightDt."""
    orbit = self.orbit
    ellipsoid = self.ellipsoid
    startTime = self.sensingStart
    midTime = self.sensingMid
    # NOTE(review): this method uses orbit.interpolate(...) while
    # calculateVelocity uses orbit.interpolateOrbit(...) — confirm both
    # names exist on the Orbit class and are equivalent here.
    sv0 = orbit.interpolate(startTime)
    sv1 = orbit.interpolate(midTime)

    startHeight = sv0.calculateHeight(ellipsoid)
    midHeight = sv1.calculateHeight(ellipsoid)
    # UAVSAR-like sensors carry an explicit platform height; others use
    # the height of the start-of-scene state vector.
    if ('uav' in self.sensor.family.lower()) and (hasattr(self.sensor, 'platformHeight')):
        self.spacecraftHeight = self.sensor.platformHeight
    else:
        self.spacecraftHeight = startHeight
    # Finite-difference height rate over the start->mid interval.
    self.heightDt = (
        (midHeight - startHeight)/
        DTU.timeDeltaToSeconds(midTime - startTime)
    )
|
||||
|
||||
def calculateVelocity(self):
    """Compute the spacecraft speed at scene mid-time and store it on
    self.velocity (magnitude of the interpolated velocity vector)."""
    import math
    state_vector = self.orbit.interpolateOrbit(self.sensingMid)
    vx, vy, vz = state_vector.velocity
    self.velocity = math.sqrt(vx**2 + vy**2 + vz**2)
|
||||
|
||||
def calculateSquint(self):
    """Calculate the squint angle
    R0 is the starting range
    h is the height at mid-swath
    v is the velocity at mid-swath

    Stores the squint in degrees on self.squint and, in radians, on
    self.frame.squintAngle.  Raises ValueError when the geometry or
    the Doppler centroid is inconsistent.
    """
    import math
    startingRange = self.startingRange
    prf = self.PRF
    wavelength = self.radarWavelength
    h = self.spacecraftHeight
    # Terrain height is optional on the sensor; default to sea level.
    try:
        z = self.sensor.terrainHeight
    except:
        z = 0.0
    v = self.velocity

    # Geometry sanity check: the slant range must reach the ground.
    if h - z > startingRange:
        raise ValueError(
            ("Spacecraft Height - Terrain Height (%s) " +
            "larger than starting Range (%s)") % (h-z, startingRange))

    # sin of the look angle from the flat-earth right triangle.
    sinTheta = math.sqrt( 1 - ((h-z)/startingRange)**2 )

    # Doppler centroid fd in Hz: dict-style coefficients store the
    # constant term (PRF-normalized) under 'a'; UAVSAR supplies a
    # plain list/tuple already in Hz.
    if 'a' in self.doppler.quadratic:
        fd = self.doppler.quadratic['a']*prf
    elif isinstance(self.doppler.quadratic, (list, tuple)):
        ####For UAVSAR
        fd = self.doppler.quadratic[0]
    else:
        # NOTE(review): this branch only logs — 'fd' is left unbound
        # and the line below would raise NameError; consider raising
        # here instead.
        self.logger.error(
            "make_raw doesn't handle doppler coefficient object type, ",
            type(self.doppler.quadratic)
        )

    sinSquint = fd/(2.0*v*sinTheta)*wavelength
    if sinSquint**2 > 1:
        raise ValueError(
            "Error in One or More of the Squint Calculation Values\n"+
            "Doppler Centroid: %s\nVelocity: %s\nWavelength: %s\n" %
            (fd, v, wavelength)
        )
    self.squint = math.degrees(
        math.atan2(sinSquint, math.sqrt(1-sinSquint**2))
    )
    #squint is used later on from the frame; add it here
    self.frame.squintAngle = math.radians(self.squint)
|
||||
|
||||
def make_raw(self):
    """Main driver: extract the image from the wired sensor, estimate
    the Doppler centroid, and derive frame geometry (range bins,
    height, velocity, squint) plus the I/Q image and headers.

    Requires the 'sensor' and 'doppler' input ports to be wired.
    """
    from isceobj.Image import createRawImage, createSlcImage
    self.activateInputPorts()

    # Parse the image metadata and extract the image
    self.logger.info('Extracting image')
    try:
        self.sensor.extractImage()
    except NotImplementedError as strerr:
        self.logger.error("%s" % (strerr))
        self.logger.error(
            "make_raw not implemented for %s" % self.sensor.__class__
        )
        raise NotImplementedError
    #reset the global variable to empty so can go back to use default api
    self.sensor.frame.image.renderVRT()
    self.frame = self.sensor.frame

    #jng NOTE if we pass just the sensor also in the case of raw image we
    ## can avoid the if
    if isinstance(self.frame.image, createRawImage().__class__):
        # RAW input: estimate the Doppler from the data itself.
        # Calculate the doppler fit
        self.logger.info("Calculating Doppler Centroid")

        # Probe whether this Doppler estimator accepts a 'frame' port;
        # fall back to sensor-based wiring when it does not.
        try:
            self.doppler.wireInputPort(name='frame',
                object=self.frame)
        except:
            computeFlag = False
        else:
            computeFlag = True

        if computeFlag:
            self.doppler.wireInputPort(name='instrument',
                object=self.frame.instrument)
            self.doppler.wireInputPort(name='image',
                object=self.frame.image)
            self.doppler.calculateDoppler()

        else:
            self.doppler.wireInputPort(name='sensor', object=self.sensor)
            self.doppler.calculateDoppler()

        #new jng compute slc image size here
        rangeSamplingRate = self.instrument.rangeSamplingRate
        rangePulseDuration = self.instrument.pulseLength
        goodBytes = self.frame.image.xmax - self.frame.image.xmin
        try:
            #check if the instrument implements it, if not set it to zero
            chirpExtension = self.instrument.chirpExtension # Should probably be a percentage rather than a set number
        except AttributeError:
            chirpExtension = 0

        # Range bins = complex samples (2 bytes each) minus the chirp.
        chirpSize = int(rangeSamplingRate * rangePulseDuration)
        self.frame.numberRangeBins = (int(goodBytes/2) -
            chirpSize + chirpExtension)

    elif isinstance(self.frame.image, createSlcImage().__class__):
        # SLC input: Doppler comes from the sensor metadata.
        # jng changed in view of the new tsx preproc from Howard
        self.doppler.wireInputPort(name='sensor', object=self.sensor)
        self.doppler.calculateDoppler()

        #new jng compute slc image size here
        self.frame.numberRangeBins = self.frame.image.width
    else:
        message = (
            "Unrecognized image type %s" %
            str(self.frame.image.__class__)
        )
        self.logger.error(message)
        raise TypeError(message)

    # Fit a polynomial to the doppler values. in the tsx case or every
    # zero doppler case this function simple sets the a = fd b = 0, c = 0
    self.doppler.fitDoppler()

    # Create a doppler object
    prf = self.frame.instrument.PRF
    #coef = self.doppler.coeff_list
    #for ii in range(len(coef), 4):
    #    coef.append(0.0)

    # Normalize the fitted coefficients to Hz (dict form stores the
    # constant term PRF-normalized under 'a'; UAVSAR lists are in Hz).
    if 'a' in self.doppler.quadratic:
        coef = [self.doppler.quadratic['a']*prf,0.0,0.0,0.0]
    elif isinstance(self.doppler.quadratic, (list, tuple)):
        ####For UAVSAR
        coef = self.doppler.quadratic
    else:
        # NOTE(review): this branch only logs — 'coef' stays unbound
        # and Doppler(...) below would raise NameError; consider
        # raising here instead.
        self.logger.error(
            "make_raw doesn't handle doppler coefficient object type, ",
            type(self.doppler.quadratic)
        )

    self.dopplerValues = Doppler(prf=prf)
    self.dopplerValues.setDopplerCoefficients(coef, inHz=True)

    if self.frame._dopplerVsPixel is None:
        self.frame._dopplerVsPixel = [x*prf for x in coef]

    # Calculate the height, height_dt, and velocity
    self.logger.info("Calculating Spacecraft Velocity")
    self.calculateHeightDt()
    self.calculateVelocity()

    # Calculate squint angle
    self.logger.info("Calculating Squint Angle")
    self.calculateSquint()
    self.frame.image.numberGoodBytes = self.frame.image.xmax - self.frame.image.xmin
    self.frame.image.coord1.coordStart = self.frame.image.xmin
    self.createIQImage()
    self.frame.image.renderHdr()
    #just in case the Sensor does not compute the pulse timing
    try:
        self.adjustSensingStart()
    except:
        pass
    return None
|
||||
|
||||
def createIQImage(self):
    """Create an IQ-image descriptor mirroring the frame's raw image and render its metadata.

    Builds a RawIQImage whose sample counts are half the raw image's byte-based
    counts (each IQ sample is an I/Q pair), then writes the .iq.xml header and
    the matching .vrt file.  The data file itself is shared with the raw image.
    """
    from isceobj.Image import createRawIQImage

    # Create an RawIQImage with appropriate values from the RawImage.
    self.iqImage = createRawIQImage()
    # BUGFIX: the original used true division, which yields a float width
    # under Python 3; widths/sample counts must be integers (the sibling
    # assignments below were already wrapped in int()).
    self.iqImage.width = self.frame.image.width // 2
    self.iqImage.xmax = self.iqImage.width
    self.iqImage.length = self.frame.image.length
    self.iqImage.coord1.coordStart = int(self.frame.image.coord1.coordStart/2)
    self.iqImage.numberGoodSamples = int(self.frame.image.numberGoodBytes/2)
    # The file is the same as for the raw image.
    self.iqImage.filename = self.frame.image.filename
    self.iqImage.inPhase = self.frame.instrument.getInPhaseValue()
    self.iqImage.quadrature = self.frame.instrument.getQuadratureValue()

    # Name that will be used for the xml metadata file.
    filename = self.frame.image.filename.replace('.raw', '.iq.xml')
    # Just in case the extension was not .raw.
    if '.iq' not in filename:
        filename += '.iq.xml'
    self.iqImage.renderHdr(filename)

    # Name that will be used for the vrt file.
    filename = filename.replace('.xml', '.vrt')
    self.iqImage.renderVRT(filename)
|
||||
|
||||
def adjustSensingStart(self, pulseTimingFilename=None, ext='.aux'):
    """Refine the frame's sensing start time from the pulse-timing (.aux) file.

    The aux file is a binary sequence of little-endian double pairs
    (day-of-year, microseconds-within-day), one per pulse.  The mean offset
    between the recorded pulse times and the ideal 1/PRF spacing gives the
    corrected start-of-acquisition, which replaces frame.sensingStart.

    Parameters:
        pulseTimingFilename: explicit aux file path; defaults to the raw
            image filename plus *ext*.
        ext: extension appended to the image filename when no explicit
            path is given.
    """
    pulseTimingFilename = (
        pulseTimingFilename or
        self.frame.image.filename + ext
    )
    import datetime as dt
    import math
    import struct

    # BUGFIX: must be opened in binary mode -- struct.unpack on a str
    # raises TypeError under Python 3.
    with open(pulseTimingFilename, 'rb') as fp:
        allF = fp.read()

    # Use only a limited number of points from the first frame.
    # BUGFIX: integer division; len/16 was a float under Python 3 and broke
    # both the list multiplication and range() below.
    lines = min(len(allF) // 16, 10000)
    allT = [0.0] * lines
    d0 = struct.unpack('<d', allF[0:8])[0]
    day0 = dt.timedelta(d0).days
    sec = 0
    for i in range(lines):
        day, musec = struct.unpack('<dd', allF[i*16:(i+1)*16])
        # Note: the musec are relative to the day, not to the second,
        # i.e. they are the total microseconds in the day.
        td = dt.timedelta(day, sec, musec)
        allT[i] = (
            (td.microseconds +
             (td.seconds +
              (td.days - day0) * 24 * 3600.0) * 10**6) / 10**6
        )

    prf = self.frame.instrument.PRF
    # BUGFIX: xrange does not exist in Python 3; also avoid shadowing the
    # builtin sum() as the original did.
    offsets = [allT[i] - i/prf for i in range(len(allT))]
    meanOffset = math.fsum(offsets) / len(allT)
    day = day0
    sec = math.floor(meanOffset)
    musec = (meanOffset - sec) * 10**6
    sensingOld = self.frame.sensingStart
    # day-1 since we start from Jan 1 and not Jan 0.
    newSensingStart = (
        dt.datetime(sensingOld.year, 1, 1) +
        dt.timedelta(day - 1, sec, musec)
    )
    self.frame.setSensingStart(newSensingStart)
    self.logger.info("Changing sensing start from %s to %s" %
                     (str(sensingOld), str(newSensingStart)))
|
||||
|
||||
def __str__(self):
    """Return a readable multi-line summary of the derived quantities."""
    template = (
        "Velocity: (%s)\n"
        "HeightDt: (%s)\n"
        "Squint: (%s)\n"
        "Height: (%s)\n"
    )
    values = (
        self.velocity,
        self.heightDt,
        self.squint,
        self.spacecraftHeight,
    )
    return template % values
|
||||
|
||||
pass
|
||||
|
||||
## JEB: added a main for script operation
|
||||
def main():
    """Instantiate the application and execute its workflow."""
    app = makeRawApp()
    return app.run()
|
||||
|
||||
if __name__ == "__main__":
    import sys
    # An XML parameter file is required as the single command-line argument;
    # otherwise print usage and exit with a non-zero status.
    if (len(sys.argv) < 2):
        print("Usage:%s <xml-parameter file>" % sys.argv[0])
        sys.exit(1)
    main()
|
||||
|
|
@ -0,0 +1,62 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
import sys
|
||||
import os
|
||||
import math
|
||||
import isce
|
||||
from iscesys.Compatibility import Compatibility
|
||||
Compatibility.checkPythonVersion()
|
||||
from iscesys.Display.Display import Display
|
||||
##
|
||||
# Call mdx.py argv.
|
||||
# The first element in argv must be the metadata file (i.e. metadata.rsc or metadata.xml) when displaying an image (could be something else when printing help info). If the file does not end by .rsc or .xml, then one needs to specify
|
||||
# the -type flag that could be rsc or xml. For rsc type of metadata the rsc ROI_PAC format is assumed. For xml type the ISCE xml format is assumed.
|
||||
# In case the data file name is not simply the metadata file name with the extension removed (for instance metadata file image.int.rsc and data file image.int)
|
||||
# then use the -image flag and specify the filename.
|
||||
# If the type of image that needs to be displayed cannot be inferred from the extension (for ROI_PAC type) or from the metadata doc string (ISCE type) then specify the -ext flag.
|
||||
# To print a list of extensions run mdx.py -ext.
|
||||
# To print the usage with the list of options just run mdx.py with no arguments.
|
||||
# The flags -cw,-e,-amp1,-amp2,-chdr,-RMG-Mag,-RMG_Hgt -wrap,-wrap and -cmap have some defaults value depending on the image type. By specifying these flags in the command line the default values can be overwritten.
|
||||
# Any flags in argv that are not among the options listed above are passed through to mdx as trailing arguments on the command line.
|
||||
##
|
||||
def main(argv = None):
    """Build a Display component and hand argv through to its mdx driver."""
    display = Display()
    display.mdx(argv)
|
||||
|
||||
if __name__ == "__main__":
    # With no arguments, pass None through to main() (presumably Display.mdx
    # then prints its own usage -- TODO confirm); otherwise forward everything
    # after the script name.
    if len(sys.argv) == 1:
        sys.exit(main())
    else:
        sys.exit(main(sys.argv[1:]))
|
||||
|
|
@ -0,0 +1,51 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Eric Gurrola
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
# import sqlite3, sql_mod, string # ImportError: No module named sql_mod
|
||||
# NOTE(review): `string` is imported but unused here; kept to avoid changing
# the module's import surface.
import sqlite3, string

# Map of display labels to actual table names in the roi.db database.
tables = {'file1':'file1','file2':'file2','igram1':'igram1','log':'log',
          'ambiguity':'ambiguity_table'}

con = sqlite3.connect('roi.db')
cur = con.cursor()
# BUGFIX: the original never closed the connection; ensure it is released
# even if a query fails.
try:
    for k, v in tables.items():
        print()
        print()
        print("table: ",v)
        print("================")
        print()
        # Table names come only from the hard-coded dict above, so the
        # string concatenation is not an injection risk.
        a = cur.execute('select * from '+v)
        for x in a:
            print(x)
finally:
    con.close()
|
||||
|
||||
|
|
@ -0,0 +1,470 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Authors: Giangi Sacco, Eric Gurrola
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
import time
|
||||
import sys
|
||||
from isce import logging
|
||||
|
||||
import isce
|
||||
import isceobj
|
||||
import iscesys
|
||||
from iscesys.Component.Application import Application
|
||||
from iscesys.Compatibility import Compatibility
|
||||
from iscesys.Component.Configurable import SELF
|
||||
from isceobj import RtcProc
|
||||
from isceobj.Util.decorators import use_api
|
||||
|
||||
# Module-level logger for the rtcApp/grdsar application.
logger = logging.getLogger('isce.grdsar')


# --- User-configurable parameters for the GRDSAR application ---

# Sensor/platform of the input data (default Sentinel-1 GRD).
SENSOR_NAME = Application.Parameter(
    'sensorName',
    public_name='sensor name',
    default='SENTINEL1',
    type=str,
    mandatory=True,
    doc="Sensor name"
)

# Restrict DEM download to the highest-resolution SRTM only.
USE_HIGH_RESOLUTION_DEM_ONLY = Application.Parameter(
    'useHighResolutionDemOnly',
    public_name='useHighResolutionDemOnly',
    default=False,
    type=int,
    mandatory=False,
    doc=(
        """If True and a dem is not specified in input, it will only
        download the SRTM highest resolution dem if it is available
        and fill the missing portion with null values (typically -32767)."""
    )
)
# User-supplied DEM path; empty string triggers automatic download.
DEM_FILENAME = Application.Parameter(
    'demFilename',
    public_name='demFilename',
    default='',
    type=str,
    mandatory=False,
    doc="Filename of the Digital Elevation Model (DEM)"
)

# Optional SWBD water-body file used for masking.
WATER_FILENAME = Application.Parameter(
    'waterFilename',
    public_name='waterFilename',
    default='',
    type=str,
    mandatory=False,
    doc='Filename with SWBD data')

# NOTE(review): WATER_FILENAME / APPLY_WATER_MASK are declared but do not
# appear in GRDSAR.parameter_list below -- confirm whether that is intended.
APPLY_WATER_MASK = Application.Parameter(
    'applyWaterMask',
    public_name='apply water mask',
    default=False,
    type=bool,
    mandatory=False,
    doc = 'Flag to apply water mask to images')

# Geographic subset for the geocoded output (SNWE, degrees).
GEOCODE_BOX = Application.Parameter(
    'geocode_bbox',
    public_name='geocode bounding box',
    default = None,
    container=list,
    type=float,
    doc='Bounding box for geocoding - South, North, West, East in degrees'
)

# EPSG code of the target projection for geocoding.
EPSG = Application.Parameter(
    'epsg',
    public_name='epsg id',
    default = '',
    type=str,
    doc='epsg code for roi'
)

# Output grid spacing of the geocoded product, in meters.
GSPACING = Application.Parameter('gspacing',
                public_name='geocode spacing',
                default = 100.0,
                type = float,
                doc = 'Desired grid spacing of geocoded product in meters, in the specified UTM grid.'
                )
|
||||
|
||||
# Interpolation method used when resampling during geocoding.
INTMETHOD = Application.Parameter('intmethod',
                public_name='geocode interpolation method',
                default = 'bilinear',
                type = str,
                # BUGFIX: the doc text was a copy-paste of GSPACING's grid
                # spacing description and did not describe this parameter.
                doc = 'Interpolation method used when resampling the geocoded product (e.g. bilinear).'
                )
|
||||
|
||||
# Directory where step-mode pickle objects are written.
PICKLE_DUMPER_DIR = Application.Parameter(
    'pickleDumpDir',
    public_name='pickle dump directory',
    default='PICKLE',
    type=str,
    mandatory=False,
    doc=(
        "If steps is used, the directory in which to store pickle objects."
    )
)
# Directory from which step-mode pickle objects are restored.
PICKLE_LOAD_DIR = Application.Parameter(
    'pickleLoadDir',
    public_name='pickle load directory',
    default='PICKLE',
    type=str,
    mandatory=False,
    doc=(
        "If steps is used, the directory from which to retrieve pickle objects."
    )
)

# Serialization format for step-mode state: 'xml' or 'pickle'.
RENDERER = Application.Parameter(
    'renderer',
    public_name='renderer',
    default='xml',
    type=str,
    mandatory=True,
    doc=(
        "Format in which the data is serialized when using steps. Options are xml (default) or pickle."
    ))

# Number of azimuth looks; None lets the processor derive it from POSTING.
NUMBER_AZIMUTH_LOOKS = Application.Parameter('numberAzimuthLooks',
        public_name='azimuth looks',
        default=None,
        type=int,
        mandatory=False,
        doc='')


# Number of range looks; None lets the processor derive it from POSTING.
NUMBER_RANGE_LOOKS = Application.Parameter('numberRangeLooks',
        public_name='range looks',
        default=None,
        type=int,
        mandatory=False,
        doc=''
        )

# Desired data posting; used to determine multilooking factors.
POSTING = Application.Parameter('posting',
        public_name='posting',
        default = 10.0,
        type = float,
        mandatory = False,
        doc = 'Posting of data. This can be any integer multiple of the product resolution. Used to determine looks')

# Polarization channels to process; empty means all available.
# NOTE(review): the mutable [] default is shared at declaration time --
# assumes Application.Parameter copies it per instance; verify.
POLARIZATIONS = Application.Parameter('polarizations',
        public_name='polarizations',
        default = [],
        type = str,
        container = list,
        doc = 'Polarizations to process')

# Products to geocode; None defers to the processor's default list.
GEOCODE_LIST = Application.Parameter(
    'geocode_list',
    public_name='geocode list',
    default = None,
    container=list,
    type=str,
    doc = "List of products to geocode."
)

# Optional Sentinel-1 thermal noise removal.
APPLY_THERMAL_NOISE_CORRECTION = Application.Parameter(
    'apply_thermal_noise_correction',
    public_name='apply thermal noise correction',
    default=False,
    type=bool,
    mandatory=False,
    doc = 'Flag to apply thermal noise correction. Currently only available for Sentinel-1.')


#Facility declarations

# The input GRD sensor/reader facility.
REFERENCE = Application.Facility(
    'reference',
    public_name='Reference',
    module='isceobj.Sensor.GRD',
    factory='createSensor',
    args=(SENSOR_NAME, 'reference'),
    mandatory=True,
    doc="GRD data component"
)

# DEM download/stitch manager.
DEM_STITCHER = Application.Facility(
    'demStitcher',
    public_name='demStitcher',
    module='iscesys.DataManager',
    factory='createManager',
    args=('dem1','iscestitcher',),
    mandatory=False,
    doc="Object that based on the frame bounding boxes creates a DEM"
)


# The RtcProc state object that carries intermediate products between steps.
_GRD = Application.Facility(
    '_grd',
    public_name='rtcproc',
    module='isceobj.RtcProc',
    factory='createRtcProc',
    args = ('rtcAppContext',isceobj.createCatalog('rtcProc')),
    mandatory=False,
    doc="RtcProc object"
)
|
||||
|
||||
|
||||
class GRDSAR(Application):
    """Application driver for radiometric terrain correction of GRD data."""

    family = 'grdsar'
    ## Define Class parameters in this list
    parameter_list = (SENSOR_NAME,
                      USE_HIGH_RESOLUTION_DEM_ONLY,
                      DEM_FILENAME,
                      NUMBER_AZIMUTH_LOOKS,
                      NUMBER_RANGE_LOOKS,
                      POSTING,
                      GEOCODE_BOX,
                      EPSG,
                      GSPACING,
                      INTMETHOD,
                      PICKLE_DUMPER_DIR,
                      PICKLE_LOAD_DIR,
                      RENDERER,
                      POLARIZATIONS,
                      GEOCODE_LIST,
                      APPLY_THERMAL_NOISE_CORRECTION)

    # Facilities (sub-components) created for each application instance.
    facility_list = (REFERENCE,
                     DEM_STITCHER,
                     _GRD)

    # Name of the attribute serialized when steps are used.
    _pickleObj = "_grd"

    def __init__(self, family='', name='',cmdline=None):
        """Set up the application: writer, run-wrapper methods, processor factory.

        Parameters:
            family: component family name (defaults to the class family).
            name: instance name.
            cmdline: command-line arguments forwarded to Application.
        """
        import isceobj
        from isceobj.RtcProc import RtcProc
        from iscesys.StdOEL.StdOELPy import create_writer

        super().__init__(
            family=family if family else self.__class__.family, name=name,
            cmdline=cmdline)

        # Standard output/error writer used by the underlying processors.
        self._stdWriter = create_writer("log", "", True, filename="grdsar.log")
        # Attach the run* wrapper callables (see _add_methods).
        self._add_methods()
        self._insarProcFact = RtcProc
        return None
|
||||
|
||||
|
||||
|
||||
def Usage(self):
    """Print the supported command-line invocations for rtcApp.py."""
    # BUGFIX: the original printed "Usages: " -- user-facing typo.
    print("Usage: ")
    print("rtcApp.py <input-file.xml>")
    print("rtcApp.py --steps")
    print("rtcApp.py --help")
    print("rtcApp.py --help --steps")
||||
|
||||
|
||||
def _init(self):
    """Log and print the ISCE version banner after initialization."""

    message = (
        ("ISCE VERSION = %s, RELEASE_SVN_REVISION = %s,"+
         "RELEASE_DATE = %s, CURRENT_SVN_REVISION = %s") %
        (isce.__version__,
         isce.release_svn_revision,
         isce.release_date,
         isce.svn_revision)
    )
    logger.info(message)

    print(message)
    return None

def _configure(self):
    """Record version info in the process doc and reconcile geocode lists."""

    # NOTE(review): _init uses isce.__version__ while this uses
    # isce.release_version -- confirm both attributes exist and are intended.
    self.grd.procDoc._addItem("ISCE_VERSION",
        "Release: %s, svn-%s, %s. Current svn-%s" %
        (isce.release_version, isce.release_svn_revision,
         isce.release_date, isce.svn_revision
        ),
        ["rtcProc"]
    )

    # If the user gave no geocode list, inherit the processor's default;
    # otherwise the user's list wins, with a warning on any mismatch.
    if(self.geocode_list is None):
        self.geocode_list = self.grd.geocode_list
    else:
        g_count = 0
        for g in self.geocode_list:
            if g not in self.grd.geocode_list:
                g_count += 1
        #warn if there are any differences in content
        if g_count > 0:
            print()
            logger.warning((
                "Some filenames in rtcApp.geocode_list configuration "+
                "are different from those in rtcProc. Using names given"+
                " to grdApp."))
            print("grdApp.geocode_list = {}".format(self.geocode_list))
            print(("grdProc.geocode_list = {}".format(
                self.grd.geocode_list)))

        self.grd.geocode_list = self.geocode_list

    return None
|
||||
|
||||
@property
def grd(self):
    # Accessor for the underlying RtcProc facility object.
    return self._grd

@grd.setter
def grd(self, value):
    # Replace the RtcProc facility (used when restoring pickled state).
    self._grd = value
    return None
|
||||
|
||||
@property
def procDoc(self):
    """Read-only access to the process documentation catalog held by grd."""
    return self.grd.procDoc

@procDoc.setter
def procDoc(self, value):
    # BUGFIX: a property setter must accept the assigned value.  The
    # original signature omitted it, so assignment raised TypeError
    # instead of the intended AttributeError below.
    raise AttributeError(
        "Can not assign to .grd.procDoc-- but you hit all its other stuff"
    )
|
||||
|
||||
def _finalize(self):
    # No finalization work needed for this application.
    pass

def help(self):
    """Print the class docstring and the list of supported GRD sensors."""
    from isceobj.Sensor.GRD import SENSORS
    print(self.__doc__)
    lsensors = list(SENSORS.keys())
    lsensors.sort()
    print("The currently supported sensors are: ", lsensors)
    return None

def help_steps(self):
    """Point the user at the step documentation in README / ISCE.pdf."""
    print(self.__doc__)
    print("A description of the individual steps can be found in the README file")
    print("and also in the ISCE.pdf document")
    return


def renderProcDoc(self):
    # Serialize the process documentation catalog to XML.
    self.procDoc.renderXml()

def startup(self):
    # Print help and record the processing start time on the state object.
    self.help()
    self._grd.timeStart = time.time()

def endup(self):
    # Render the process doc and log total wall-clock time.
    self.renderProcDoc()
    self._grd.timeEnd = time.time()
    logger.info("Total Time: %i seconds" %
                (self._grd.timeEnd-self._grd.timeStart))
    return None


## Add instance attribute RunWrapper functions, which emulate methods.
def _add_methods(self):
    # Each create* factory returns a callable bound to this application
    # instance; they are invoked by _steps()/main() below.
    self.runPreprocessor = RtcProc.createPreprocessor(self)
    self.verifyDEM = RtcProc.createVerifyDEM(self)
    self.multilook = RtcProc.createLooks(self)
    self.runTopo = RtcProc.createTopo(self)
    self.runNormalize = RtcProc.createNormalize(self)
    self.runGeocode = RtcProc.createGeocode(self)

    return None
|
||||
|
||||
def _steps(self):
    """Register the processing steps, in execution order, for --steps mode."""

    self.step('startup', func=self.startup,
              doc=("Print a helpful message and "+
                   "set the startTime of processing")
              )

    # Run a preprocessor for the two sets of frames
    self.step('preprocess',
              func=self.runPreprocessor,
              doc=(
                  """Unpack the input data"""
              )
              )

    # Verify whether the DEM was initialized properly. If not, download
    # a DEM
    self.step('verifyDEM', func=self.verifyDEM)

    #Multilook product as needed
    self.step('multilook', func=self.multilook)

    ##Run topo for each burst
    self.step('topo', func=self.runTopo)

    ##Run normalize to get gamma0
    self.step('normalize', func=self.runNormalize)

    # Geocode
    self.step('geocode', func=self.runGeocode)

    return None

@use_api
def main(self):
    """Run the full workflow in one shot (non-steps mode), mirroring _steps()."""
    self.help()

    timeStart= time.time()

    # Run a preprocessor for the two sets of frames
    self.runPreprocessor()

    #Verify whether user defined a dem component. If not, then download
    # SRTM DEM.
    self.verifyDEM()

    #Multilook as needed
    self.multilook()

    ##Run topo for each burst
    self.runTopo()

    ##Run normalize to get gamma0
    self.runNormalize()

    # Geocode
    self.runGeocode()

    timeEnd = time.time()
    logger.info("Total Time: %i seconds" %(timeEnd - timeStart))

    self.renderProcDoc()

    return None
|
||||
|
||||
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # NOTE(review): sys is imported but unused in this guard.
    import sys
    # Build, configure from the command line / xml, and run the application.
    grdsar = GRDSAR(name="rtcApp")
    grdsar.configure()
    grdsar.run()
|
||||
|
|
@ -0,0 +1,136 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
import isce
|
||||
import logging
|
||||
import logging.config
|
||||
from iscesys.Component.Application import Application
|
||||
from iscesys.Component.Component import Component
|
||||
import os
|
||||
# Selects which DEM stitcher implementation the factory instantiates
# ('version2' legacy SRTM or the newer 'version3').
STITCHER = Component.Parameter('_stitcher',
    public_name='stitcher',
    default = 'version3',
    type = str,
    mandatory = False,
    doc = "Use as argument for the stitcher factory. Supported old version 2 or new version 3 SRTM")
|
||||
class Stitcher(Application):
    """Application wrapper around the demStitcher facility: download and/or
    stitch DEM tiles according to the configured action, bbox and pairs."""

    def main(self):
        """Dispatch on demStitcher._action ('stitch' or 'download') and report."""
        # prevent from deleting local files
        if(self.demStitcher._useLocalDirectory):
            self.demStitcher._keepAfterFailed = True
            self.demStitcher._keepDems = True
        # if a metadata file is created, set the right type
        if(self.demStitcher._meta == 'xml'):
            self.demStitcher.setCreateXmlMetadata(True)
        elif(self.demStitcher._meta == 'rsc'):
            self.demStitcher.setCreateRscMetadata(True)
        # check for the action to be performed
        if(self.demStitcher._action == 'stitch'):
            # bbox is [latMin, latMax, lonMin, lonMax]
            if(self.demStitcher._bbox):
                lat = self.demStitcher._bbox[0:2]
                lon = self.demStitcher._bbox[2:4]
                if (self.demStitcher._outputFile is None):
                    self.demStitcher._outputFile = self.demStitcher.defaultName(self.demStitcher._bbox)

                if not(self.demStitcher.stitchDems(lat,lon,self.demStitcher._source,self.demStitcher._outputFile,self.demStitcher._downloadDir, \
                        keep=self.demStitcher._keepDems)):
                    print('Could not create a stitched DEM. Some tiles are missing')
                else:
                    if(self.demStitcher._correct):
                        # NOTE(review): width is computed but unused since the
                        # call to correct() takes no arguments -- see the
                        # commented-out signature below; confirm intent.
                        width = self.demStitcher.getDemWidth(lon,self.demStitcher._source)
                        self.demStitcher.correct()
                        #self.demStitcher.correct(self.demStitcher._output,self.demStitcher._source,width,min(lat[0],lat[1]),min(lon[0],lon[1]))
            else:
                print('Error. The --bbox (or -b) option must be specified when --action stitch is used')
                raise ValueError
        elif(self.demStitcher._action == 'download'):
            if(self.demStitcher._bbox):
                lat = self.demStitcher._bbox[0:2]
                lon = self.demStitcher._bbox[2:4]
                self.demStitcher.getDemsInBox(lat,lon,self.demStitcher._source,self.demStitcher._downloadDir)
            # can make the bbox and pairs mutually exclusive if we replace the if below with elif
            if(self.demStitcher._pairs):
                self.demStitcher.downloadFilesFromList(self.demStitcher._pairs[::2],self.demStitcher._pairs[1::2],self.demStitcher._source,self.demStitcher._downloadDir)
            if(not (self.demStitcher._bbox or self.demStitcher._pairs)):
                print('Error. Either the --bbox (-b) or the --pairs (-p) options must be specified when --action download is used')
                raise ValueError

        else:
            print('Unrecognized action -a or --action',self.demStitcher._action)
            return

        # Optionally print the per-file download report.
        if(self.demStitcher._report):
            for k,v in list(self.demStitcher._downloadReport.items()):
                print(k,'=',v)

    def _facilities(self):
        """
        Define the user configurable facilities for this application.
        """
        self.demStitcher = self.facility(
            'demStitcher',
            public_name='demStitcher',
            module='contrib.demUtils',
            factory='createDemStitcher',
            args=(self.stitcher,'iscestitcher',),
            mandatory=False,
            doc=(
                "Object that based on the frame bounding boxes creates a DEM"
            )
        )
|
||||
def Usage(self):
    """Print usage, including how to pass credentials on the command line."""
    print("\nUsage: stitcher.py input.xml\n")
    # BUGFIX: the override prefix was misspelled "sticher." in the original
    # message; the component family (and hence the prefix) is "stitcher".
    print("NOTE: if you don't want to store your password in a file you can run it as\n" +
          "'stitcher.py input.xml stitcher.demStitcher.username=yourUsername\n" +
          "stitcher.demStitcher.password=yourPassword'\n\n")
|
||||
|
||||
# Component family name; doubles as the command-line override prefix.
family = 'stitcher'

# User-configurable parameters for this application.
parameter_list = (STITCHER,)

@property
def stitcher(self):
    # Which stitcher implementation to build (see STITCHER parameter).
    return self._stitcher
@stitcher.setter
def stitcher(self,stitcher):
    self._stitcher = stitcher

def __init__(self,family = '', name = ''):
    """Initialize the Application with this class's family unless overridden."""
    super(Stitcher, self).__init__(family if family else self.__class__.family, name=name)
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # NOTE(review): sys is imported but unused in this guard.
    import sys
    # Build, configure from the xml input, and run the stitcher application.
    ds = Stitcher()
    ds.configure()
    ds.run()
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,331 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2016 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Joshua Cohen
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
import time
|
||||
import sys
|
||||
from isce import logging
|
||||
|
||||
import isce
|
||||
import isceobj
|
||||
from isceobj import TopsProc
|
||||
from isce.applications.topsApp import TopsInSAR
|
||||
from iscesys.Component.Application import Application
|
||||
from isceobj.Util.decorators import use_api
|
||||
|
||||
logger = logging.getLogger('isce.insar')
|
||||
|
||||
# Ampcor / dense-offset tuning parameters consumed by TopsOffset below.
# Same internal names, public names, defaults, types and docs as before;
# only the declaration layout differs.

WINDOW_SIZE_WIDTH = Application.Parameter(
    'winwidth', public_name='Ampcor window width', default=32, type=int,
    mandatory=False, doc='Ampcor main window size width. Used in runDenseOffsets.')

WINDOW_SIZE_HEIGHT = Application.Parameter(
    'winhgt', public_name='Ampcor window height', default=32, type=int,
    mandatory=False, doc='Ampcor main window size height. Used in runDenseOffsets.')

SEARCH_WINDOW_WIDTH = Application.Parameter(
    'srcwidth', public_name='Ampcor search window width', default=20, type=int,
    mandatory=False, doc='Ampcor search window size width. Used in runDenseOffsets.')

SEARCH_WINDOW_HEIGHT = Application.Parameter(
    'srchgt', public_name='Ampcor search window height', default=20, type=int,
    mandatory=False, doc='Ampcor search window size height. Used in runDenseOffsets.')

SKIP_SAMPLE_ACROSS = Application.Parameter(
    'skipwidth', public_name='Ampcor skip width', default=16, type=int,
    mandatory=False, doc='Ampcor skip across width. Used in runDenseOffsets.')

SKIP_SAMPLE_DOWN = Application.Parameter(
    'skiphgt', public_name='Ampcor skip height', default=16, type=int,
    mandatory=False, doc='Ampcor skip down height. Used in runDenseOffsets.')

OFFSET_MARGIN = Application.Parameter(
    'margin', public_name='Ampcor margin', default=50, type=int,
    mandatory=False, doc='Ampcor margin offset. Used in runDenseOffsets.')

OVERSAMPLING_FACTOR = Application.Parameter(
    'oversample', public_name='Ampcor oversampling factor', default=32, type=int,
    mandatory=False, doc='Ampcor oversampling factor. Used in runDenseOffsets.')

ACROSS_GROSS_OFFSET = Application.Parameter(
    'rgshift', public_name='Range shift', default=0, type=int,
    mandatory=False, doc='Ampcor gross offset across. Used in runDenseOffsets.')

DOWN_GROSS_OFFSET = Application.Parameter(
    'azshift', public_name='Azimuth shift', default=0, type=int,
    mandatory=False, doc='Ampcor gross offset down. Used in runDenseOffsets.')

OFFSET_SCALING_FACTOR = Application.Parameter(
    'scale_factor', public_name='Offset scaling factor', default=1.0, type=float,
    mandatory=False, doc='Offset field unit scaling factor (1.0 default is pixel)')

OFFSET_WIDTH = Application.Parameter(
    'offset_width', public_name='Offset image nCols', default=None, type=int,
    mandatory=False,
    doc='Number of columns in the final offset field (calculated in DenseAmpcor).')

OFFSET_LENGTH = Application.Parameter(
    'offset_length', public_name='Offset image nRows', default=None, type=int,
    mandatory=False,
    doc='Number of rows in the final offset field (calculated in DenseAmpcor).')

OFFSET_TOP = Application.Parameter(
    'offset_top', public_name='Top offset location', default=None, type=int,
    mandatory=False,
    doc='Ampcor-calculated top offset location. Overridden by workflow.')

OFFSET_LEFT = Application.Parameter(
    'offset_left', public_name='Left offset location', default=None, type=int,
    mandatory=False,
    doc='Ampcor-calculated left offset location. Overridden by workflow.')

SNR_THRESHOLD = Application.Parameter(
    'snr_thresh', public_name='SNR Threshold factor', default=None, type=float,
    mandatory=False,
    doc='SNR Threshold factor used in filtering offset field objects.')

FILTER_NULL = Application.Parameter(
    'filt_null', public_name='Filter NULL factor', default=-10000., type=float,
    mandatory=False,
    doc='NULL factor to use in filtering offset fields to avoid numpy type issues.')

FILTER_WIN_SIZE = Application.Parameter(
    'filt_size', public_name='Filter window size', default=5, type=int,
    mandatory=False, doc='Window size for median_filter.')

OFFSET_OUTPUT_FILE = Application.Parameter(
    'offsetfile', public_name='Offset filename', default='dense_offsets',
    type=None, mandatory=False,
    doc='Filename for gross dense offsets BIL. Used in runDenseOffsets.')

FILT_OFFSET_OUTPUT_FILE = Application.Parameter(
    'filt_offsetfile', public_name='Filtered offset filename',
    default='filt_dense_offsets', type=None, mandatory=False,
    doc='Filename for filtered dense offsets BIL.')

OFFSET_MODE = Application.Parameter(
    'off_mode', public_name='Is offset mode', default=True, type=bool,
    mandatory=False,
    doc='Application-specific parameter to indicate whether running topsApp or topsOffsetApp.')

OFFSET_GEOCODE_LIST = Application.Parameter(
    'off_geocode_list', public_name='offset geocode list', default=None,
    container=list, type=str, mandatory=False,
    doc='List of offset-specific files to geocode.')
|
||||
|
||||
#Basically extends the TopsInSAR class
|
||||
class TopsOffset(TopsInSAR):
    """
    Dense-offset variant of topsApp.

    Extends TopsInSAR with the Ampcor dense-offset parameters declared above
    and a workflow that merges SLCs, estimates dense offsets, crops/filters
    them, and geocodes the results.
    """

    # Extend the parent's parameter list with the offset-specific parameters.
    parameter_list = TopsInSAR.parameter_list + (
        WINDOW_SIZE_WIDTH,
        WINDOW_SIZE_HEIGHT,
        SEARCH_WINDOW_WIDTH,
        SEARCH_WINDOW_HEIGHT,
        SKIP_SAMPLE_ACROSS,
        SKIP_SAMPLE_DOWN,
        OFFSET_MARGIN,
        OVERSAMPLING_FACTOR,
        ACROSS_GROSS_OFFSET,
        DOWN_GROSS_OFFSET,
        OFFSET_SCALING_FACTOR,
        OFFSET_WIDTH,
        OFFSET_LENGTH,
        OFFSET_TOP,
        OFFSET_LEFT,
        SNR_THRESHOLD,
        FILTER_NULL,
        FILTER_WIN_SIZE,
        OFFSET_OUTPUT_FILE,
        FILT_OFFSET_OUTPUT_FILE,
        OFFSET_MODE,
        OFFSET_GEOCODE_LIST)
    facility_list = TopsInSAR.facility_list

    # Keep the parent's family/pickle object so existing configs keep working.
    family = 'topsinsar'
    _pickleObj = '_insar'

    def __init__(self, family='', name='', cmdline=None):
        """Initialize like TopsInSAR, then bind the offset-workflow methods."""
        super().__init__(family=family if family else self.__class__.family,
                         name=name, cmdline=cmdline)
        self._add_methods()

    @use_api
    def main(self):
        """Run the full dense-offset workflow and report the elapsed time."""
        timeStart = time.time()

        self.runMergeSLCs()
        self.runDenseOffsets()
        self.runCropOffsetGeo()
        self.runOffsetFilter()
        self.runOffsetGeocode()

        timeEnd = time.time()
        print('Total Time: %i seconds' % (timeEnd - timeStart))
        return None

    def _add_methods(self):
        """Bind the TopsProc step factories onto this application instance."""
        self.verifyDEM = TopsProc.createVerifyDEM(self)  # NOTE: Not independently called, needed for
        self.runGeocode = TopsProc.createGeocode(self)   # runGeocode.py
        self.runMergeSLCs = TopsProc.createMergeSLCs(self)
        self.runDenseOffsets = TopsProc.createDenseOffsets(self)
        self.runCropOffsetGeo = TopsProc.createCropOffsetGeo(self)
        self.runOffsetFilter = TopsProc.createOffsetFilter(self)
        self.runOffsetGeocode = TopsProc.createOffsetGeocode(self)
        return None

    def _steps(self):
        """Register the resumable processing steps (used with --steps)."""
        # BUGFIX: the original doc was built as 'Print a helpful message and'
        # + 'set the startTime of processing', which rendered as "andset".
        self.step('startup', func=self.startup,
                  doc='Print a helpful message and set the startTime of processing')

        self.step('mergeSLCs', func=self.runMergeSLCs)

        self.step('denseOffsets', func=self.runDenseOffsets)

        self.step('cropOffsetGeo', func=self.runCropOffsetGeo)

        self.step('offsetFilter', func=self.runOffsetFilter)

        self.step('offsetGeocode', func=self.runOffsetGeocode)

        return None
|
||||
|
||||
|
||||
if __name__ == "__main__":
    # Build, configure from the command line / xml, and run the application.
    app = TopsOffset(name="topsOffsetApp")
    app.configure()
    app.run()
|
||||
|
|
@ -0,0 +1,102 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2013 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Piyush Agram
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
import os
|
||||
import logging
|
||||
import sys
|
||||
|
||||
import isce
|
||||
import argparse
|
||||
from contrib.demUtils.UpsampleDem import UpsampleDem
|
||||
from iscesys.Parsers.FileParserFactory import createFileParser
|
||||
from isceobj.Image import createDemImage
|
||||
|
||||
class customArgparseFormatter(argparse.ArgumentDefaultsHelpFormatter,
                              argparse.RawDescriptionHelpFormatter):
    """Help formatter that shows argument defaults while preserving the
    epilog's hand-written layout."""
|
||||
|
||||
def cmdLineParse():
    """Parse command-line options for integer DEM oversampling.

    Normalizes --factor so it is always a two-element [lat, lon] list;
    raises on more than two values.
    """
    parser = argparse.ArgumentParser(
        description='Oversample DEM by integer factor.',
        formatter_class=customArgparseFormatter,
        epilog='''

Example:

upsampleDem.py -i input.dem -o output.dem -f 4 4

This oversamples the input dem in both lat and lon by a factor of 4.''')
    parser.add_argument('-i', '--input', type=str, required=True, help='Input ISCE DEM with a corresponding .xml file.', dest='infile')
    parser.add_argument('-o', '--output', type=str, default=None, help='Output ISCE DEM with a corresponding .xml file.', dest='outfile')
    parser.add_argument('-m', '--method', type=str, default='BIQUINTIC', help='Interpolation method out of Akima / Biquintic. Default: biquintic.', dest='method')
    parser.add_argument('-f', '--factor', type=int, nargs='+', required=True, help='Oversampling factor in lat and lon (or a single value for both).', dest='factor')

    values = parser.parse_args()
    nfactors = len(values.factor)
    if nfactors > 2:
        raise Exception('Factor should be a single number or a list of two. Undefined input for -f or --factor : '+str(values.factor))
    elif nfactors == 1:
        # A single value applies to both axes.
        values.factor = [values.factor[0], values.factor[0]]

    return values
|
||||
|
||||
if __name__ == "__main__":
    inps = cmdLineParse()

    # Resolve the input data file and its .xml metadata twin.
    if inps.infile.endswith('.xml'):
        inFileXml = inps.infile
        inFile = os.path.splitext(inps.infile)[0]
    else:
        inFile = inps.infile
        inFileXml = inps.infile + '.xml'

    # BUGFIX: --output defaults to None, so the original crashed with
    # AttributeError on None.endswith. Fail with a clear message instead.
    if inps.outfile is None:
        raise SystemExit('Error: an output file must be specified with -o/--output')
    if inps.outfile.endswith('.xml'):
        outFile = os.path.splitext(inps.outfile)[0]
    else:
        outFile = inps.outfile

    # Load the input DEM's metadata and open the image.
    parser = createFileParser('xml')
    prop, fac, misc = parser.parse(inFileXml)

    inImage = createDemImage()
    inImage.init(prop, fac, misc)
    inImage.filename = inFile
    inImage.createImage()

    # Run the oversampler: yFactor is lat, xFactor is lon.
    upsampObj = UpsampleDem()
    upsampObj.method = inps.method
    upsampObj.setOutputFilename(outFile)
    upsampObj.upsampledem(demImage=inImage, yFactor=inps.factor[0], xFactor=inps.factor[1])
|
||||
|
|
@ -0,0 +1,75 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Walter Szeliga
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
from isce import logging
|
||||
from iscesys.Compatibility import Compatibility
|
||||
Compatibility.checkPythonVersion()
|
||||
from iscesys.Component.FactoryInit import FactoryInit
|
||||
from isceobj.Renderer.XmlRenderer import XmlRenderer
|
||||
|
||||
class viewMetadataApp(FactoryInit):
    """Extract a sensor's metadata, print it to stdout, and render it to XML."""

    def __init__(self, arglist):
        """Initialize the factory from arglist and fetch the configured
        Sensor and XmlRenderer components."""
        FactoryInit.__init__(self)
        self.initFactory(arglist)
        self.logger = logging.getLogger('isce.viewMetadata')
        self.sensorObj = self.getComponent('Sensor')
        self.renderer = self.getComponent('XmlRenderer')

    def main(self):
        """Parse the image metadata, dump it, then render the frame to XML."""
        self.logger.info('Parsing Metadata')
        self.sensorObj.extractImage()
        frame = self.sensorObj.getFrame()
        instrument = frame.getInstrument()
        platform = instrument.getPlatform()
        orbit = frame.getOrbit()
        attitude = frame.getAttitude()

        # Dump every metadata object, including each state vector.
        print(platform)
        print(instrument)
        print(frame)
        print(orbit)
        for sv in orbit:
            print(sv)

        print(attitude)
        for sv in attitude:
            print(sv)

        self.logger.info('Rendering Metadata')
        self.renderer.setComponent(frame)
        self.renderer.render()
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import sys
    # Require at least the xml parameter file argument.
    if len(sys.argv) < 2:
        print("Usage:%s <xml-parameter file>" % sys.argv[0])
        sys.exit(1)
    viewMetadataApp(sys.argv[1:]).main()
|
||||
|
|
@ -0,0 +1,137 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
from __future__ import print_function
|
||||
import isce
|
||||
import sys
|
||||
import os
|
||||
import argparse
|
||||
from contrib.demUtils.WaterMask import MaskStitcher
|
||||
import isceobj
|
||||
def main():
    """Stitch or download SWBD water-body tiles covering the bounding box of
    an input ISCE DEM, driven entirely by command-line options."""
    # With no arguments, force the --help flag.
    if len(sys.argv) == 1:
        sys.argv.append('-h')

    # Usage examples shown at the bottom of --help.
    epilog = 'Usage examples:\n\n'
    epilog += 'mask.py -a stitch -i dem.xml -r -n your_username -w your_password -u https://aria-dav.jpl.nasa.gov/repository/products \n\n'
    epilog += 'mask.py -a download -i dem.xml \n\n'
    epilog += 'mask.py -a stitch -i dem.xml -k -r -l\n'
    # RawDescriptionHelpFormatter keeps the epilog's own line breaks instead
    # of re-wrapping them.
    parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter, epilog=epilog)

    parser.add_argument('-a', '--action', type=str, default='stitch', dest='action', help='Possible actions: stitch or download (default: %(default)s). ')
    parser.add_argument('-m', '--meta', type=str, default='xml', dest='meta', help='What type of metadata file is created. Possible values: xml or rsc (default: %(default)s)')
    parser.add_argument('-i', '--input', type=str, required=True, dest='indem', help='Input DEM for which the land water mask is desired.')
    parser.add_argument('-k', '--keep', action='store_true', dest='keep', help='If the option is present then the single files used for stitching are kept. If -l or --local is specified than the flag is automatically set (default: %(default)s)')
    parser.add_argument('-r', '--report', action='store_true', dest='report', help='If the option is present then failed and succeeded downloads are printed (default: %(default)s)')
    parser.add_argument('-l', '--local', action='store_true', dest='local', help='If the option is present then use the files that are in the location specified by --dir. If not present --dir indicates the directory where the files are downloaded (default: %(default)s)')
    parser.add_argument('-d', '--dir', type=str, dest='dir', default='./', help='If used in conjunction with --local it specifies the location where the DEMs are located otherwise it specifies the directory where the DEMs are downloaded and the stitched DEM is generated (default: %(default)s)')

    parser.add_argument('-o', '--output', type=str, dest='output', default=None, help='Name of the output file to be created in --dir. If not provided the system generates one based on the bbox extremes')
    parser.add_argument('-n', '--uname', type=str, dest='uname', default=None, help='User name if using a server that requires authentication')
    parser.add_argument('-w', '--password', type=str, dest='password', default=None, help='Password if using a server that requires authentication')
    parser.add_argument('-u', '--url', type=str, dest='url', default=None, help='Part of the url where the DEM files are located. The actual location must be the one specified by --url plus /srtm/version2_1/SRTM(1,3)')

    args = parser.parse_args()

    # Configure the stitcher first: url, username and password are needed
    # before any tile can be fetched.
    ds = MaskStitcher()
    ds.configure()
    if args.url:
        ds.setUrl(args.url)
    ds.setUsername(args.uname)
    ds.setPassword(args.password)
    ds._keepAfterFailed = True
    # Avoid accidentally removing local files when -k is forgotten;
    # they can always be removed manually.
    if args.local:
        args.keep = True
    if args.meta == 'xml':
        ds.setCreateXmlMetadata(True)
    elif args.meta == 'rsc':
        ds.setCreateRscMetadata(True)

    ds.setUseLocalDirectory(args.local)

    # Parse the input DEM's xml metadata to derive its bounding box.
    inimg = isceobj.createDemImage()
    inimg.load(args.indem + '.xml')

    north = inimg.coord2.coordStart
    south = north + inimg.coord2.coordDelta * (inimg.length - 1)

    west = inimg.coord1.coordStart
    east = west + inimg.coord1.coordDelta * (inimg.width - 1)

    bbox = [south, north, west, east]

    ds.setWidth(inimg.width)
    ds.setLength(inimg.length)
    ds.setFirstLatitude(north)
    ds.setFirstLongitude(west)
    ds.setLastLatitude(south)
    ds.setLastLongitude(east)

    # Dispatch on the requested action.
    if args.action == 'stitch':
        lat = bbox[0:2]
        lon = bbox[2:4]
        if args.output is None:
            args.output = ds.defaultName(bbox)

        if not ds.stitchMasks(lat, lon, args.output, args.dir, keep=args.keep):
            print('Some tiles are missing. Maybe ok')

    elif args.action == 'download':
        lat = bbox[0:2]
        lon = bbox[2:4]
        ds.getMasksInBox(lat, lon, args.dir)

    else:
        print('Unrecognized action -a or --action', args.action)
        return

    if args.report:
        for k, v in ds._downloadReport.items():
            print(k, '=', v)
|
||||
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
|
||||
|
|
@ -0,0 +1,66 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2020
|
||||
#
|
||||
|
||||
|
||||
import sys
|
||||
import isce
|
||||
from isceobj.Alos2Proc.runDownloadDem import download_wbd
|
||||
|
||||
|
||||
def download_wbd_old(snwe):
    '''
    for keeping the option of the old wbd.py

    Builds minimal stand-in objects carrying the attributes
    runCreateWbdMask expects, then delegates to it.
    snwe: [south, north, west, east] bounds in degrees.
    '''

    from isceobj.InsarProc.runCreateWbdMask import runCreateWbdMask

    class INSAR:
        def __init__(self):
            self.applyWaterMask = True
            self.wbdImage = None

    class SELF:
        def __init__(me, snwe):
            me.geocode_bbox = snwe
            me.insar = INSAR()

    class INFO:
        def __init__(self, snwe):
            self.extremes = snwe
        # BUGFIX: the original signature was getExtremes(x), so the instance
        # bound to x and the body dereferenced the *outer* 'self' (a SELF
        # instance with no .extremes) — any call raised. Take self properly
        # and accept (and ignore) the optional argument callers may pass.
        def getExtremes(self, x=None):
            return self.extremes

    self = SELF(snwe)
    info = INFO(None)
    runCreateWbdMask(self, info)
|
||||
|
||||
|
||||
if __name__=="__main__":

    # Require the four bounding coordinates; print usage otherwise.
    if len(sys.argv) < 5:
        print()
        print("usage: wbd.py s n w e [c]")
        print(" s: south latitude bounds in degrees")
        print(" n: north latitude bounds in degrees")
        print(" w: west longitude bounds in degrees")
        print(" e: east longitude bounds in degrees")
        print(" c: whether correct missing water body tiles problem")
        print(" 0: False")
        print(" 1: True (default)")
        sys.exit(0)

    # Optional fifth argument toggles the missing-tile correction (default on).
    doCorrection = not (len(sys.argv) >= 6 and int(sys.argv[5]) == 0)

    snwe = [float(v) for v in sys.argv[1:5]]

    if doCorrection:
        download_wbd(snwe[0], snwe[1], snwe[2], snwe[3])
    else:
        download_wbd_old(snwe)
|
||||
|
|
@ -0,0 +1,111 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
import isce
|
||||
import logging
|
||||
import logging.config
|
||||
from iscesys.Component.Application import Application
|
||||
from iscesys.Component.Component import Component
|
||||
from contrib.demUtils.SWBDStitcher import SWBDStitcher
|
||||
|
||||
import os
|
||||
# Facility that constructs the SWBD stitcher component for the application.
STITCHER = Application.Facility(
    '_stitcher',
    public_name='wbd stitcher',
    module='contrib.demUtils',
    factory='createSWBDStitcher',
    args=('awbdstitcher',),
    mandatory=True,
    doc="Water body stitcher")
|
||||
class Stitcher(Application):
    """Application wrapper around the SWBD stitcher facility: stitches or
    downloads water-body tiles for the configured bounding box."""

    family = 'wbdstitcher'
    facility_list = (STITCHER,)

    def __init__(self, family='', name=''):
        super(Stitcher, self).__init__(family if family else self.__class__.family, name=name)

    @property
    def stitcher(self):
        return self._stitcher

    @stitcher.setter
    def stitcher(self, stitcher):
        self._stitcher = stitcher

    def main(self):
        st = self._stitcher
        # Prevent deleting local files when they were supplied by the user.
        if st._useLocalDirectory:
            st._keepAfterFailed = True
            st._keepWbds = True
        # If a metadata file is to be created, set the right type.
        if st._meta == 'xml':
            st.setCreateXmlMetadata(True)

        # Dispatch on the requested action.
        if st._action == 'stitch':
            if st._bbox:
                lat = st._bbox[0:2]
                lon = st._bbox[2:4]
                if st._outputFile is None:
                    st._outputFile = st.defaultName(st._bbox)

                if not st.stitchWbd(lat, lon, st._outputFile, st._downloadDir,
                                    keep=st._keepWbds):
                    print('Could not create a stitched water body mask. Some tiles are missing')

            else:
                print('Error. The "bbox" attribute must be specified when the action is "stitch"')
                raise ValueError
        elif st._action == 'download':
            if st._bbox:
                lat = st._bbox[0:2]
                lon = st._bbox[2:4]
                st.getWbdsInBox(lat, lon, st._downloadDir)

        else:
            print('Unrecognized action ', st._action)
            return

        if st._report:
            for key, val in list(st._downloadReport.items()):
                print(key, '=', val)

    def Usage(self):
        print("\nUsage: wbdStitcher.py input.xml\n")
|
||||
|
||||
|
||||
if __name__ == "__main__":
    import sys
    # Build, configure from the xml input, and run the stitcher application.
    stitcherApp = Stitcher('wbdstitcher')
    stitcherApp.configure()
    stitcherApp.run()
|
||||
|
|
@ -0,0 +1,46 @@
|
|||
#!/usr/bin/env python3
|
||||
import sys
|
||||
import argparse
|
||||
import os
|
||||
def main():
    """Drive fftwf-wisdom over every requested fft type/place/direction and
    power-of-two size, accumulating everything into one wisdom file.

    Two scratch files (wisdom0/wisdom1) are used in ping-pong fashion so
    each run appends to the wisdom accumulated by the previous one.
    """
    args = parse()
    which = 0
    for t in args.type:
        for p in args.place:
            for d in args.direction:
                size = args.sizes[0]
                while size <= args.sizes[1]:
                    if which == 0:
                        # First invocation: start fresh, or seed from an
                        # existing wisdom file when appending.
                        if args.action == 'new':
                            append = ''
                        elif args.action == 'append':
                            append = '-w ' + args.file
                        else:
                            print('Error. Unrecognized action', args.action)
                            raise Exception
                    else:
                        # Subsequent runs read the previous scratch file.
                        append = '-w wisdom' + str(which % 2)
                    command = ('fftwf-wisdom -n ' + append + ' -o wisdom'
                               + str((which + 1) % 2) + ' ' + t + p + d + str(size))
                    print("command = ", command)
                    os.system(command)
                    size *= 2
                    which += 1
    # Keep the final scratch file under the requested name; drop the other.
    os.system('mv wisdom' + str(which % 2) + ' ' + args.file)
    os.system('rm wisdom' + str((which + 1) % 2))
|
||||
|
||||
|
||||
def parse():
    """Build and evaluate the argument parser for the wisdom generator."""
    p = argparse.ArgumentParser()
    p.add_argument('-a', '--action', type=str, default='new', dest='action',
                   help='What to do: new create a new wisdom file, appends it appends from the -f.')
    p.add_argument('-f', '--file', type=str, default='isce_wisdom.txt', dest='file',
                   help='File name for wisdom file.')
    p.add_argument('-t', '--type', type=str, default='cr', dest='type',
                   help='Type of fftw data c = complex r = real.')
    p.add_argument('-p', '--place', type=str, default='io', dest='place',
                   help='Type of fftw place i = in place o = out of place.')
    p.add_argument('-d', '--direction', type=str, default='fb', dest='direction',
                   help='Type of fftw direction f = forward b = backward.')
    p.add_argument('-s', '--sizes', type=int, nargs='+', default=[32, 65536], dest='sizes',
                   help='Min and max.')
    return p.parse_args()
|
||||
|
||||
if __name__ == '__main__':
    # Propagate main()'s return value (None -> exit status 0).
    sys.exit(main())
|
||||
|
|
@ -0,0 +1,907 @@
|
|||
#!/usr/bin/env python3
|
||||
"""
|
||||
The main code. This code will look at the command line arguments. If
|
||||
an invalid number of arguments are given, it will return an error.
|
||||
Otherwise, it will read the commandline arguments. If one argument
|
||||
is given, the code will assume the class name is the same as the
|
||||
module name, and try to import the class. Otherwise, it will import
|
||||
the given class from the given module and try to make an instance
|
||||
of it.
|
||||
This code will first try to run ._parameters and ._facilities
|
||||
method of the instance. Then, it will check the dictionaryOfVariables
|
||||
of the Insar class to see what components may be required. If it is
|
||||
not empty, it will make a GUI with the following components:
|
||||
- Label to indicate the component name, and whether or not its optional
|
||||
- An entry box for the user to input the value for the component
|
||||
- Buttons for each facility to allow user to
|
||||
change the component of each one
|
||||
- A Save button to save the component values, as well as the components
|
||||
of the facilities that the user has saved
|
||||
- A button to switch between saving a single xml file or saving
|
||||
the xml file using multiple xml files
|
||||
- A Reset all button, which resets all the inputted data in program
|
||||
- A button to allow the user to use an existing xml file to change
|
||||
data
|
||||
- A quit button to quit the GUI
|
||||
|
||||
Global Variables Used: parameters, dictionaryOfFacilities, facilityButtons,
|
||||
facilityDirs, classInstance, description, allParams,
|
||||
singleFile, directory, facilityParams
|
||||
|
||||
"""
|
||||
import sys
import os
import traceback
# Python 3 locations of the GUI/IO modules, bound to the Python 2 names
# (tk, tkFileDialog, tkMessageBox, tkFont, StringIO) that the rest of this
# file uses, so no call sites have to change.  The original Python 2-only
# imports (StringIO, Tkinter, tkFileDialog/tkMessageBox/tkFont) do not
# exist under the python3 interpreter named in the shebang and made the
# script fail at import time.
from io import StringIO
import tkinter as tk
import tkinter.filedialog as tkFileDialog
import tkinter.messagebox as tkMessageBox
import tkinter.font as tkFont
import xml.etree.ElementTree as ElementTree
from xml.parsers.expat import ExpatError

import isce
from iscesys.Compatibility import Compatibility
Compatibility.checkPythonVersion()
#from insarApp import Insar
|
||||
"""
|
||||
Global Definitions:
|
||||
|
||||
classInstance - The instance of Insar that is created. This is the instance
|
||||
which has the dictionaryOfVariables and dictionaryOfFacilities
|
||||
attributes.
|
||||
|
||||
allParams - A dictionary of dictionaries containing all the parameters that
|
||||
have been set so far.
|
||||
|
||||
parameters - a list containing class instances of class parameter, used to
|
||||
access the user entry and the name and whether or not it is
|
||||
optional in a clean manner.
|
||||
|
||||
description - a description of variables for parameters
|
||||
|
||||
|
||||
facilityParams - a list containing instances of class parameter, used
|
||||
to access the user entry for the facility's parameter
|
||||
more easily, similar to global variable parameters.
|
||||
|
||||
dictionaryOfFacilities - the dictionaryOfFacilities, contains the names
                         of all the facilities, as well as each facility's
                         factorymodule, which is the path to the module
                         containing its factoryname, which creates an
                         instance of the facility
|
||||
|
||||
facilityButtons - The buttons, which cause a GUI for the facility to pop up
                  when pressed. They are disabled when a facility GUI is
                  already present.
|
||||
|
||||
facilityDirs - A dictionary containing the locations that the
|
||||
user saved the xml file for each key, which is the
|
||||
facility name.
|
||||
|
||||
root2 - The Tk instance for the second GUI, which should be the
        GUI for the facility's parameters.
|
||||
|
||||
rootName - The name that the component in the xml is saved under.
|
||||
This value is either the name of a facility or 'insarApp'.
|
||||
|
||||
directory - The directory at which the most recent file was saved.
|
||||
|
||||
singleFile - A boolean which indicates whether or not to save
|
||||
the final XML file as a single file or multiple XML in
|
||||
catalog format.
|
||||
"""
|
||||
|
||||
class RefactorWarning(DeprecationWarning):
    """Warning raised to flag code paths that are due for refactoring.

    Remove the statements that raise it if these alerts are unwanted.
    """
|
||||
|
||||
class parameter:
    """Bundle everything the GUI tracks for one input parameter.

    Attributes:
        key:      the parameter's display name.
        text:     the Tk Entry widget used for inputting this parameter.
        optional: True when the parameter may be left blank.
        attrib:   the attribute name this parameter has on the Insar class.
    """

    def __init__(self, key=None, text=None, optional=None, attrib=None):
        # Store everything verbatim; this class is a plain record.
        (self.key, self.text, self.optional, self.attrib) = (key, text, optional, attrib)
|
||||
|
||||
def indent(elem, level=0):
    """Indent an XML ElementTree in place for pretty-printing.

    Recursively rewrites the ``.text``/``.tail`` whitespace of *elem* and
    its descendants so the serialized XML nests one extra indent level per
    depth (the classic ElementTree indent recipe).
    """
    i = "\n" + level*" "
    if len(elem):
        # Element has children: open its text on a new, deeper line.
        if not elem.text or not elem.text.strip():
            elem.text = i + " "
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
        # NOTE: the loop variable deliberately shadows ``elem``; after the
        # loop ``elem`` is the *last child*, whose tail is set back to this
        # level so the parent's closing tag lines up.
        for elem in elem:
            indent(elem, level+1)
        if not elem.tail or not elem.tail.strip():
            elem.tail = i
    else:
        # Leaf element: only adjust the tail (never at the root, level 0).
        if level and (not elem.tail or not elem.tail.strip()):
            elem.tail = i
|
||||
|
||||
|
||||
## Creates the Input XML file given the user's inputs.
|
||||
## If the user has missed a mandatory field in the current level GUI,
|
||||
## this will cause a pop-up box to appear and tell the user to
|
||||
## fill in the mandatory fields. Otherwise, it will ask the
|
||||
## user for a directory to save the xml file in and create the
|
||||
## xml file given their inputs. If making the final xml file,
|
||||
## i.e the input file for the insarApp, it will also add any
|
||||
## directories created by using a catalog.
|
||||
##
|
||||
## global variables used - directory, facilityDirs, facilityButtons,
|
||||
## singleFile, allParams
|
||||
def createInputXML(parameters, rootName):
    """Creates the Input XML File given the user inputs.

    Arguments:
        parameters - A list of ``parameter`` records to be written into
                     the xml file (their Entry widgets hold the values).
        rootName - The name of the root element; 'insarApp' means the
                   top-level application input file, anything else is a
                   facility file.

    Returns:
        True when the file (or the in-memory parameter set, in single-file
        mode for a facility) was saved, False when validation failed or
        the user cancelled the save dialog.
    """
    # Get necessary global variables
    global directory
    global facilityDirs
    global facilityButtons
    global facilityRequired
    global singleFile
    global allParams
    # Checks if any of the mandatory fields are blank.
    for param in parameters:
        if(not(param.optional) and param.text.get()==''):
            tkMessageBox.showerror('ERROR!', 'Mandatory Field(s) is blank!')
            return False
    # If rootName is insarApp, and it is in multi file XML mode,
    # then the user should have, by either loading an XML which is
    # in that form or creating multiple files, a file for each facility.
    if(rootName == 'insarApp' and not singleFile):
        for x in zip(facilityButtons,facilityRequired):
            button = x[0]
            req = x[1]
            try:
                # Raising KeyError here funnels both "no entry" and
                # "empty entry" into the same error popup below.
                if(facilityDirs[button.cget('text')]=='' and req):
                    raise KeyError
            except KeyError:
                tkMessageBox.showerror('ERROR!',
                    'Facility parameters not saved in a file for:\n' +
                    button.cget('text'))
                return False
    # If rootName is insarApp and it is in single file XML mode,
    # then the user should have, by either loading an XML file or
    # by inputting and saving, have data for each facility.
    elif(rootName == 'insarApp' and singleFile):
        for x in zip(facilityButtons,facilityRequired):
            button = x[0]
            req = x[1]
            try:
                if(allParams[button.cget('text')] == {} and req):
                    raise KeyError
            except KeyError:
                tkMessageBox.showerror('ERROR!',
                    'Facility parameters not set in:\n' +
                    button.cget('text'))
                return False
    # Get a directory from the user to save in if we are in multi file XML
    # mode and/or is saving the insarApp input file.
    if(not singleFile or rootName == 'insarApp'):
        directory = tkFileDialog.asksaveasfilename(initialfile=rootName+'.xml',
                                                   title="Choose where to save:",
                                                   defaultextension='.xml',
                                                   filetypes=[('xml files', '.xml')])
        # Empty string means the user cancelled the dialog.
        if(not directory):
            return False
        else:
            # Create the input xml file using ElementTree.
            top = ElementTree.Element(rootName)
            top.text='\n'
            root = ElementTree.SubElement(top,'component', {'name':rootName})
            for param in parameters:
                if(param.text.get()!=''):
                    property = ElementTree.SubElement(root,'property', {'name':param.key})
                    value = ElementTree.SubElement(property,'value')
                    value.text = param.text.get()
            # If this is the insarApp input file, we must put the
            # directory of all the input xml files for the facilities
            if(rootName == 'insarApp'):
                # If we are in singleFile mode, write all the parameters
                # into the file that we were writing to.
                if singleFile:
                    for key in allParams.keys():
                        if allParams[key]:
                            facility = ElementTree.SubElement(root, 'component', {'name':key})
                            for paramKey in allParams[key].keys():
                                if allParams[key][paramKey]:
                                    param = ElementTree.SubElement(facility, 'property',
                                                                  {'name':paramKey})
                                    value = ElementTree.SubElement(param, 'value')
                                    value.text = allParams[key][paramKey]
                # Otherwise, write the directory of each facility into
                # the file that we were writing to.
                else:
                    for key in facilityDirs.keys():
                        if facilityDirs[key]:
                            property = ElementTree.SubElement(root, 'component', {'name':key})
                            catalog = ElementTree.SubElement(property, 'catalog')
                            catalog.text = facilityDirs[key]
            # Write the file using ElementTree.
            # If the file we are saving is the insarApp input file,
            # we want insarApp tag on top of it. Otherwise, just
            # put the data in to the xml file
            if(rootName == 'insarApp'):
                tempTree = ElementTree.ElementTree(root)
                indent(tempTree.getroot())
                tree = ElementTree.ElementTree(top)
            else:
                tree = ElementTree.ElementTree(root)
                indent(tree.getroot())
            tree.write(directory)
    # Since the user is saving a facility in the single file XML mode,
    # save the values in the global variable allParams
    else:
        allParams[rootName] = {}
        for param in parameters:
            allParams[rootName][param.key] = param.text.get()
    return True
|
||||
|
||||
|
||||
## Creates the input XML for a toplevel GUI, which
|
||||
## should be for the facility's components. After
|
||||
## saving the XML file, it will exit the toplevel
|
||||
## GUI and save the directory that it was saved to
|
||||
## in a dictionary with the key as the name of the
|
||||
## facility.
|
||||
##
|
||||
## global variables used - facilityComponents, dir, rootName, facilityDirs
|
||||
def facilityInputXML():
    """Creates an XML file for a facility's parameters.

    Save-button callback for the facility GUI: writes the facility XML via
    createInputXML, closes the facility window and records where the file
    was saved in facilityDirs (keyed by facility name).
    """
    global facilityParams
    global directory
    global rootName
    global facilityDirs
    # Create the XML using the facilityParams and the rootName, which was
    # set as the facility name when the facility GUI was made.
    if(createInputXML(facilityParams, rootName)):
        # Saved successfully: close the facility GUI and remember the path
        # (createInputXML stored it in the global ``directory``).
        facilityQuit()
        if(directory):
            facilityDirs[rootName] = directory
    return
|
||||
|
||||
|
||||
## Creates the input XML for insarApp, which is
|
||||
## at the root.
|
||||
def componentInputXML():
    """Creates an XML file for the InsarApp.

    Save-button callback for the main GUI: writes the top-level
    'insarApp' input file from the main-window parameters.
    """
    global parameters
    global facilityDirs
    createInputXML(parameters, 'insarApp')
|
||||
|
||||
###The event that is called when a facilityButton is
|
||||
## pressed by the user. When the button is pressed,
|
||||
## the code will first try to create an instance of
|
||||
## the class using the argument given in the
|
||||
## dictionaryOfFacilities and the method given in it.
|
||||
## If it fails, it will return an error
|
||||
## message, indicating a matching argument for the method
|
||||
## was not found. If it succeeds, it will disable the facility
|
||||
## buttons, since we can only have one other GUI open at once.
|
||||
## Then, it will also disable the inputs to the components,
|
||||
## since those should not be changed, since the facility could
|
||||
## depend on the values. It will then proceed to make
|
||||
## a GUI with entries for each component found in the
|
||||
## attribute dictionaryOfVariables of the instance.
|
||||
def facilityEvent(event):
    """Creates a pop-up GUI for inputting facility parameters.

    Button-release callback for a facility button on the main GUI.
    Instantiates the facility via the factory recorded in
    dictionaryOfFacilities, then builds a Toplevel editor window with one
    row (name button / entry / units / optional-mandatory label) per key
    of the instance's dictionaryOfVariables.  While the editor is open the
    main GUI's facility buttons and entries are disabled.
    """
    # Load all the global variables used in this function
    global parameters
    global dictionaryOfFacilities
    global facilityButtons
    global facilityParams
    global rootName
    global root2
    global classInstance
    global singleFile
    global allParams
    global facilityDocs
    # Find which facility button the user pressed
    # through its text, and set it as the rootName
    text = event.widget.cget('text')
    rootName = text
    # Initiate instance as None
    instance = None
    # Initiate a StringIO and set it as stdout to
    # catch any error messages the factory
    # method produces
    temp = sys.stdout
    errorStr = StringIO('')
    sys.stdout = errorStr
    # Call the parameters method to restore the
    # default value of facilities
    try:
        classInstance._parameters()
    except:
        pass
    # Push the values typed in the main GUI onto the class instance, since
    # the facility factories may depend on them.
    for param in parameters:
        if param.text.get():
            # exec 'classInstance.' + param.attrib + '= \'' + param.text.get() + '\''
            setattr(classInstance, param.attrib, eval('\'' + param.text.get() + '\''))
            pass
        pass
    try:
        classInstance._facilities()
    except:
        pass
    # Try to use the arguments in the dictionaryOfFacilities
    # to instantiate an instance of the facility
    try:
        args = dictionaryOfFacilities[text]['args']
        kwargs = dictionaryOfFacilities[text]['kwargs']
        # May need to be modified if a factory takes
        # the None argument
        modified = ['']*len(args)
        for i in range(0, len(args)):
            if(args[i] == None):
                modified[i] = 'None'
            else:
                modified[i] = args[i]
                pass
            pass
        modified = tuple(modified)
        # raise RefactorWarning("refactor with apply built-in")
        instance = eval(
            dictionaryOfFacilities[text]['factoryname']+'(*' + modified.__str__() + ', **' +
            kwargs.__str__() + ')'
        )
    except Exception as e:
        traceback.print_exc(file=sys.stdout)
        tkMessageBox.showerror('ERROR!', 'Unknown error occurred:\n'+errorStr.getvalue()+'\n%s' %e)
        return None
    # If the instance is still none, this means
    # that an error message was produced, and
    # that it failed to make an instance.
    # Print out the error message
    # produced, which is contained in the StringIO
    sys.stdout = temp
    if instance is None:
        tkMessageBox.showerror('ERROR!', 'Bad argument for: ' +
                               dictionaryOfFacilities[text]['factoryname'] +
                               '\n' + errorStr.getvalue())
        return
    # Try to run the ._parameters() and ._facilities()
    # methods of the instance, and then get its
    # dictionaryOfVariables
    try:
        instance._parameters()
    except:
        pass
    try:
        instance._facilities()
    except:
        pass
    dictionaryOfVariables = None
    try:
        dictionaryOfVariables = instance.dictionaryOfVariables
    except:
        pass
    # Check if the dictionaryOfVariables is empty or does not exist
    if (dictionaryOfVariables is None or dictionaryOfVariables == {}):
        # Create a Popup Error message
        sys.stdout = sys.stderr
        tkMessageBox.showerror('ERROR!', 'DictionaryOfVariables for ' +
                               text + ' is empty! Nothing to do...')
        return
    # Disable all the facilityButtons b/c multiple facility
    # GUI's are not supported
    for button in facilityButtons:
        button.config(state='disabled')
    for param in parameters:
        param.text.config(state='disabled')
    XMLButton.config(state='disabled')
    # Create the new facility GUI
    root2 = tk.Toplevel()
    root2.protocol("WM_DELETE_WINDOW",facilityQuit)
    root2.title('Facility '+text+ ' Component Editor')
    tempFont = ('Times New Roman', 14)
    # Create a font with underlines
    uFont = tkFont.Font(family='Times New Roman', size=14, underline=True)
    # First column gives the name
    nameLabel = tk.Label(root2, text='Name (Click a name for help)', font=uFont)
    # Second column allows user to input values for each attribute
    valueLabel = tk.Label(root2, text='Value', font=uFont)
    # The third column is for units
    unitsLabel = tk.Label(root2, text='Units', font=uFont)
    # The fourth column indicates to users whether or not an
    # attribute is optional or mandatory.
    requiredLabel = tk.Label(root2, text='Optional/Mandatory', font=uFont)
    # Put each label in respective locations
    nameLabel.grid(row=0, column=0)
    valueLabel.grid(row=0, column=1)
    unitsLabel.grid(row=0, column=2)
    requiredLabel.grid(row=0, column=3)
    r = 1
    # Reset facilityParams, since we are using a new
    # facility
    facilityParams = []
    try:
        units = instance.unitsOfVariables
    except:
        pass
    try:
        facilityDocs = instance.descriptionOfVariables
    except:
        pass
    for key in dictionaryOfVariables.keys():
        label = tk.Label(root2, text=key)
        label.grid(row=r, column=0)
        if(dictionaryOfVariables[key][2].lower() == 'optional'):
            opt = tk.Label(root2, text='Optional', fg='green')
            facilityParams.append(parameter(key, tk.Entry(root2), True))
        else:
            opt = tk.Label(root2, text='Mandatory', fg='red')
            facilityParams.append(parameter(key, tk.Entry(root2), False))
        # Units column is best-effort: skipped when units[key] is missing.
        try:
            label = tk.Label(root2, text=units[key])
            label.grid(row=r, column=2)
        except:
            pass
        button = tk.Button(root2, text=key, width=25)
        button.bind('<ButtonRelease>', facilityHelp)
        button.grid(row=r, column=0)
        opt.grid(row=r, column=3)
        facilityParams[r-1].text.grid(row=r, column=1)
        r = r + 1
    # Put the known arguments into the entry boxes before outputting
    # them, and also check for any "trash" values inside the dictionary
    # that could occur from loading an xml file with incorrect facility
    # parameters
    temp = {}
    temp[text] = {}
    for param in facilityParams:
        try:
            param.text.insert(0, allParams[text][param.key])
            temp[text][param.key] = allParams[text][param.key]
        except:
            pass
    allParams[text] = temp[text]
    # Create a quit and save button, as well as a dir button so
    # that the user can load a directory and use that as their
    # facility XML file
    quitButton = tk.Button(root2, text='Quit', command=facilityQuit)
    saveButton = tk.Button(root2, text='Save', command=facilityInputXML)
    dirButton = tk.Button(root2, text='Use An Existing\n XML File',
                          command=getFacilityDirectory)
    quitButton.grid(row=r, column=2)
    saveButton.grid(row=r, column=1)
    dirButton.grid(row=r, column=0)
    root2.mainloop()
|
||||
|
||||
def facilityHelp(event):
    """Show the help documentation for a facility parameter.

    Button-release callback in the facility editor: pops up an info box
    with the documentation recorded in ``facilityDocs`` for the parameter
    named on the clicked button, or an error box when none exists.

    Args:
        event: Tk event whose widget is the clicked parameter-name button.
    """
    global facilityDocs
    text = event.widget.cget('text')
    if(text in facilityDocs.keys() and facilityDocs[text] != ''):
        # BUG FIX: the guard checks facilityDocs, so the message must come
        # from facilityDocs too.  The original displayed description[text],
        # which is the *main* GUI's doc dict and may not contain this key
        # (compare showDoc, which is the description counterpart).
        tkMessageBox.showinfo(text+' documentation:', facilityDocs[text])
    else:
        tkMessageBox.showerror('Documentation Not Found!', 'There is no documentation\nfor this parameter')
|
||||
|
||||
|
||||
## This method is called when the button for using an already existing
|
||||
## XML file is clicked on the facility GUI. The method tries to open
|
||||
## the xml file given, and stores the data in the global variable
|
||||
## allParams, as well as populate them in the GUI's entry boxes.
|
||||
##
|
||||
## Global Variables Used: rootName, facilityDirs, facilityParams
|
||||
def getFacilityDirectory():
    """Gets the directory for the xml used for the facility's parameter.

    Asks the user for an existing facility XML file, copies its property
    values into the facility GUI's entry boxes (and into allParams), and
    records the chosen path in facilityDirs keyed by the facility name.
    """
    global rootName
    global facilityDirs
    global facilityParams
    directory = tkFileDialog.askopenfilename(title='Locate Your XML File for '
                                             + rootName, defaultextension='.xml',
                                             filetypes=[('xml files', '.xml')])
    # Empty string means the user cancelled the dialog.
    if(directory):
        try:
            tree = ElementTree.parse(directory)
            value = ''
            name = ''
            for property in tree.findall('property'):
                name = property.attrib['name']
                value = property.find('value').text
                # Match the property against the facility's known
                # parameters; ``name`` is cleared on a match, so a
                # non-empty name afterwards flags an unknown parameter.
                for param in facilityParams:
                    if param.key == name:
                        param.text.delete(0, tk.END)
                        param.text.insert(0, value)
                        allParams[rootName][param.key] = value
                        name = ''
                        break
                if name != '':
                    tkMessageBox.showerror('Error!', 'Invalid XML for'+
                                           rootName + ' facility!'
                                           + '\nParameter ' + name +
                                           ' does not exist in this facility!')
                    return
        except ExpatError:
            tkMessageBox.showerror('Error!', 'Invalid XML error! XML is ill formed!')
        except Exception:
            tkMessageBox.showerror('Error!', 'Invalid XML error! XML is ill formed for ' + rootName + '!')
        # NOTE(review): the path is recorded even when parsing above raised
        # and was reported — looks intentional (best-effort), but confirm.
        facilityDirs[rootName] = directory
|
||||
|
||||
## This is the quit button event for the facility GUI. This
|
||||
## quits out of the for facility and reenables all the
|
||||
## buttons for the other facilities and entry boxes for
|
||||
## the components.
|
||||
##
|
||||
## Global Variables Used: facilityButtons, components, root2, XMLButton
|
||||
def facilityQuit():
    """The button event for Quit button on facility GUI. This destroys the
    facility GUI and restores disabled buttons on main GUI."""
    # root2 is the facility editor Toplevel created by facilityEvent.
    root2.destroy()
    # Re-enable everything facilityEvent disabled on the main window.
    for button in facilityButtons:
        button.config(state='normal')
    for param in parameters:
        param.text.config(state='normal')
    XMLButton.config(state='normal')
|
||||
|
||||
def showDoc(event):
    """Show documentation for the parameter named on the clicked button.

    Looks the name up in the module-level ``description`` dict; pops an
    info box with the doc text, or an error box when none is recorded.
    """
    name = event.widget.cget('text')
    doc = description.get(name, '')
    if doc != '':
        tkMessageBox.showinfo(name + ' documentation:', doc)
    else:
        tkMessageBox.showerror('Documentation Not Found!', 'There is no documentation\nfor this parameter')
|
||||
|
||||
def changeSave(event):
    """Flip between single-XML-file and multiple-XML-file save modes.

    Toggles the global ``singleFile`` flag, relabels the toggle button,
    and discards recorded facility file paths when entering single-file
    mode (they only apply to the multi-file layout).
    """
    global singleFile
    global facilityDirs
    singleFile = not singleFile
    if not singleFile:
        event.widget.configure(text = 'Currently:\nMultiple XML Mode')
    else:
        event.widget.configure(text='Currently:\nSingle XML File Mode')
        facilityDirs = {}
    return
|
||||
|
||||
def loadXML():
    """Loads an XML file for the insarApp and stores the data.

    Asks the user for an insarApp input XML, fills the main GUI entries
    from its <property> elements, and loads each facility <component>
    into allParams — following <catalog> references to per-facility
    files when present (multi-file layout).
    """
    global parameters
    global allParams
    global facilityDirs
    facilityDirs = {}
    # Get the directory from the user
    directory = ''
    directory = tkFileDialog.askopenfilename(title='Locate Your XML File:',
                                             defaultextension='.xml',
                                             filetypes=[('xml files', '.xml')])
    # If the user specified a directory, try loading it
    if directory:
        try:
            # Find the insarApp component which should have all the properties
            # and facilities
            tree = ElementTree.parse(directory).find('component')
            text = ''
            name = ''
            # First find all the parameters listed in the main GUI.
            # ``name`` is cleared on a match, so a non-empty name after the
            # inner loop flags an unknown parameter.
            for property in tree.findall('property'):
                name = property.attrib['name']
                value = property.find('value').text
                for param in parameters:
                    if param.key == name:
                        param.text.delete(0, tk.END)
                        param.text.insert(0, value)
                        name = ''
                        break
                    pass
                if name:
                    tkMessageBox.showerror('Error!', 'Invalid xml for these parameters!\n'+
                                           'Parameter ' + name + ' does not exist!')
                    pass
                pass

            # Then find the parameters for the facilities
            for facility in tree.findall('component'):
                exists = False
                facilityName = facility.attrib['name']
                # A facility is recognized iff a main-GUI button bears its name.
                for button in facilityButtons:
                    if button.cget('text') == facilityName:
                        exists = True
                        pass
                    pass
                if not exists:
                    tkMessageBox.showerror('Error!', 'Invalid xml error! Facility '
                                           + facilityName + ' does not exist!')
                    return None
                # Check whether or not the xml is in catalog format or all-in-one
                # format
                catalog = None
                catalog = facility.find('catalog')
                allParams[facilityName] = {}
                # If there is a catalog, assume that the referenced file
                # contains every parameter of the facility
                if catalog is not None:
                    catalog = catalog.text
                    facilityDirs[facilityName] = catalog
                    facilityTree = ElementTree.parse(catalog)
                    for property in facilityTree.findall('property'):
                        name = property.attrib['name']
                        value = property.find('value').text
                        allParams[facilityName][name] = value
                        pass
                    pass
                # Otherwise, go through the facility and get the parameters
                else:
                    for property in facility.findall('property'):
                        name = property.attrib['name']
                        value = property.find('value').text
                        allParams[facilityName][name] = value
        except IOError:
            tkMessageBox.showerror('Error!', 'Invalid XML error! One or more XML does not exist!')
        except ExpatError:
            tkMessageBox.showerror('Error!', 'Invalid XML error! XML is ill formed!')
        except Exception:
            tkMessageBox.showerror('Error!', 'Invalid XML error! XML is valid for insarApp!')
    return
|
||||
|
||||
|
||||
|
||||
def reset():
    """After asking the user, resets everything in the code used for writing to an xml"""
    global allParams
    global facilityDirs
    global parameters
    global facilityButtons
    global root2
    # Ask the user if they want to reset everything
    answer = tkMessageBox.askyesno("Are you sure?", "Are you sure you want to reset all data?")
    if answer:
        # Delete all entries in the main GUI
        for param in parameters:
            param.text.delete(0, tk.END)
        # Erase all data stored for writing to XML's
        allParams = {}
        facilityDirs = {}
        # Make sure that all the main GUI buttons are enabled, and
        # re-seed the per-facility entries with empty values.
        for button in facilityButtons:
            button.configure(state='normal')
            facilityDirs[button.cget('text')] = ''
            allParams[button.cget('text')] = {}
        XMLButton.config(state='normal')
        # If there is a facility GUI, get rid of it (root2 may be None or
        # already destroyed, hence the blanket except).
        try:
            root2.destroy()
        except:
            pass
        pass
    pass
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
"""Builds the main GUI for making an XML input for given class"""
|
||||
# Get the global variable
|
||||
global parameters
|
||||
global dictionaryOfFacilities
|
||||
global facilityButtons
|
||||
global facilityRequired
|
||||
global facilityDirs
|
||||
global classInstance
|
||||
global description
|
||||
global allParams
|
||||
global singleFile
|
||||
global directory
|
||||
global facilityParams
|
||||
parameters = []
|
||||
facilityParams = []
|
||||
dictionaryOfFacilities = {}
|
||||
facilityButtons = []
|
||||
facilityRequired = []
|
||||
facilityDirs = {}
|
||||
root2 = None
|
||||
rootName = ''
|
||||
directory = ''
|
||||
allParams = {}
|
||||
|
||||
# Create an instance of Insar to run the _parameters() and
|
||||
# _facilities() function, if they exist, to create the
|
||||
# dictionaryOfVariables.
|
||||
try:
|
||||
if(len(sys.argv) != 2 and len(sys.argv) != 3):
|
||||
print("Invalid commandline arguments:")
|
||||
print("Usage 1, Module and Class have same names: xmlGenerator Module")
|
||||
print("Usage 2, Module and Class names different: xmlGenerator Module Class")
|
||||
print("(Module name should not include the '.py')")
|
||||
sys.exit()
|
||||
elif(len(sys.argv) == 2):
|
||||
if 'help' in sys.argv[1]:
|
||||
print("'Invalid commandline arguments:\nUsage: xmlGenerator [Module (sans '.py'] [Class]")
|
||||
# raise RefactorWarning("refactor with __import__ built-in")
|
||||
print("Assuming module name and class name are both, ", sys.argv[1])
|
||||
exec('from ' + sys.argv[1] + ' import ' + sys.argv[1])
|
||||
classInstance = eval(sys.argv[1] + '()')
|
||||
else:
|
||||
print("importing class %s from module %s" % (sys.argv[1], sys.argv[2]))
|
||||
# raise RefactorWarning("refactor with __import__ built-in")
|
||||
exec('from ' + sys.argv[1] + ' import ' + sys.argv[2])
|
||||
# print sys.argv[2]
|
||||
classInstance = eval(sys.argv[2] + '()')
|
||||
pass
|
||||
pass
|
||||
except ImportError as e:
|
||||
print("Invalid arguments!")
|
||||
print("Either the given module or the given class does not exist,")
|
||||
print("or you have assumed they both have the same name and they do not.")
|
||||
sys.exit()
|
||||
pass
|
||||
try:
|
||||
classInstance._parameters()
|
||||
classInstance._facilities()
|
||||
except:
|
||||
pass
|
||||
dictionaryOfVariables = classInstance.dictionaryOfVariables
|
||||
try:
|
||||
dictionaryOfFacilities = classInstance._dictionaryOfFacilities
|
||||
except:
|
||||
pass
|
||||
|
||||
# If the dictionaryOfVariables is not empty, create
|
||||
# the GUI
|
||||
if dictionaryOfVariables:
|
||||
|
||||
# Since Frame class does not have scrollbars, use a
|
||||
# canvas to create a scrollbar in the y direction
|
||||
root = tk.Tk()
|
||||
root.title(sys.argv[1] + ' Input XML File Generator')
|
||||
verticalBar = tk.Scrollbar(root)
|
||||
verticalBar.grid(row=0, column=1, sticky='N'+'S')
|
||||
|
||||
# Create the Canvas, which will have the scroll bar as
|
||||
# well as the frame. Change the width here to
|
||||
# change the starting width of the screen.
|
||||
canvas = tk.Canvas(root,
|
||||
yscrollcommand=verticalBar.set,
|
||||
width=1100, height=500)
|
||||
canvas.grid(row=0, column=0, sticky='N'+'S'+'E'+'W')
|
||||
verticalBar.config(command=canvas.yview)
|
||||
|
||||
root.grid_rowconfigure(0, weight=1)
|
||||
root.grid_columnconfigure(0, weight=1)
|
||||
|
||||
|
||||
frame = tk.Frame(canvas)
|
||||
frame.rowconfigure(1, weight=1)
|
||||
frame.columnconfigure(1, weight=1)
|
||||
# Begin creating the GUI involved with input variables
|
||||
# Create a font with underlines
|
||||
uFont = tkFont.Font(family='Times New Roman', size=14, underline=True)
|
||||
# Create a parameters label
|
||||
paramLabel = tk.Label(frame, text='Parameters:',
|
||||
font=("Times New Roman", 20, "bold"))
|
||||
# First column gives the name
|
||||
nameLabel = tk.Label(frame, text='Name (Click a name for help)', font=uFont)
|
||||
# Second column allows user to input values for each attribute
|
||||
valueLabel = tk.Label(frame, text='Value', font=uFont)
|
||||
# The third column is for units
|
||||
unitsLabel = tk.Label(frame, text='Units', font=uFont)
|
||||
# The fourth column indicates to users whether or not an
|
||||
# attribute is optional or mandatory.
|
||||
requiredLabel = tk.Label(frame, text='Optional/Mandatory', font=uFont)
|
||||
# Put each label in respective locations
|
||||
paramLabel.grid(row=0, column=0)
|
||||
nameLabel.grid(row=1, column=0, columnspan=2)
|
||||
valueLabel.grid(row=1, column=2)
|
||||
unitsLabel.grid(row=1, column=4)
|
||||
requiredLabel.grid(row=1, column=5)
|
||||
|
||||
# Create a variable for the row
|
||||
r = 2
|
||||
try:
|
||||
description = classInstance.descriptionOfVariables
|
||||
except:
|
||||
pass
|
||||
units = {}
|
||||
try:
|
||||
units = classInstance.unitsOfVariables
|
||||
except:
|
||||
pass
|
||||
for key in dictionaryOfVariables.keys():
|
||||
val = dictionaryOfVariables[key]
|
||||
# Make the label from the keys in the dictionary
|
||||
# Change the wraplength here for the names if it is too short or long.
|
||||
# label = tk.Label(frame, text=key, anchor = tk.W, justify=tk.LEFT, wraplength=100)
|
||||
# label.grid(row=r,column=0)
|
||||
# Indicate whether the attribute is optional or mandatory
|
||||
if(val[2].lower() == ('optional')):
|
||||
required = tk.Label(frame, text='Optional', fg='green')
|
||||
parameters.append(parameter(key, tk.Entry(frame, width=50), True, val[0]))
|
||||
else:
|
||||
required = tk.Label(frame, text='Mandatory', fg='red')
|
||||
parameters.append(parameter(key, tk.Entry(frame, width=50), False, val[0]))
|
||||
pass
|
||||
try:
|
||||
doc = tk.Button(frame, text=key, anchor = tk.W, justify=tk.LEFT, width=50,
|
||||
wraplength=348)
|
||||
doc.bind('<ButtonRelease>', showDoc)
|
||||
doc.grid(row=r, column=0, columnspan=2)
|
||||
except:
|
||||
pass
|
||||
try:
|
||||
unit = tk.Label(frame, text=units[key])
|
||||
unit.grid(row=r, column=2)
|
||||
except:
|
||||
pass
|
||||
required.grid(row=r,column=5)
|
||||
# Put the Entry in global variable, since it is needed
|
||||
# for saving inputted values into xml
|
||||
parameters[r-2].text.grid(row=r,column=2, columnspan=2)
|
||||
r = r + 1
|
||||
pass
|
||||
if dictionaryOfFacilities:
|
||||
# Add a label indicating that these buttons are facilities
|
||||
facilityLabel = tk.Label(frame, text='Facilities:',
|
||||
font=("Times New Roman", 20, "bold"),
|
||||
justify=tk.LEFT,
|
||||
anchor=tk.W)
|
||||
facilityLabel.grid(row=r, column=0)
|
||||
r = r + 1
|
||||
x = 0
|
||||
# Make the buttons to edit facility parameters and import
|
||||
# the required modules using the factorymodule
|
||||
for key in dictionaryOfFacilities.keys():
|
||||
facilityButtons.append(tk.Button(frame, text = key, width=50, justify=tk.LEFT,
|
||||
anchor=tk.W, wraplength=348))
|
||||
facilityButtons[x].grid(row=r, column=0, columnspan=2)
|
||||
facilityButtons[x].bind('<ButtonRelease>', facilityEvent)
|
||||
facilityDirs[key] = ''
|
||||
allParams[key] = {}
|
||||
if dictionaryOfFacilities[key]['mandatory']:
|
||||
facilityRequired.append(True)
|
||||
required = tk.Label(frame, text='Mandatory', fg='red')
|
||||
required.grid(row=r,column=5)
|
||||
else:
|
||||
facilityRequired.append(False)
|
||||
required = tk.Label(frame, text='Optional', fg='green')
|
||||
required.grid(row=r,column=5)
|
||||
|
||||
r = r + 1
|
||||
x = x + 1
|
||||
try:
|
||||
exec ('from ' + dictionaryOfFacilities[key]['factorymodule'] +
|
||||
' import ' + dictionaryOfFacilities[key]['factoryname'])
|
||||
raise RefactorWarning("refactor with __import__ built-in")
|
||||
except:
|
||||
pass
|
||||
pass
|
||||
pass
|
||||
# Buttons for saving the xml file, using an existing xml file,
|
||||
# changing the save settings, and quitting out of the program
|
||||
saveButton = tk.Button(frame, text="Save", command=componentInputXML)
|
||||
quitButton = tk.Button(frame, text="Quit", command=root.destroy)
|
||||
resetButton = tk.Button(frame, text='Reset All', command=reset)
|
||||
# The button for switching between multiple xml mode and single
|
||||
# mode. The default is multiple XML mode.
|
||||
singleFile = False
|
||||
singleFileButton = tk.Button(frame, text='Currently:\nMultiple XML Mode')
|
||||
singleFileButton.bind('<ButtonRelease>', changeSave)
|
||||
# The button used to get an existing XML file
|
||||
XMLButton = tk.Button(frame, text='Use an existing XML File', command=loadXML)
|
||||
saveButton.grid(row=r+1, column=2)
|
||||
quitButton.grid(row=r+1, column=3)
|
||||
resetButton.grid(row=r+1, column=4)
|
||||
singleFileButton.grid(row=r+1, column=5)
|
||||
XMLButton.grid(row=r+1, column=1)
|
||||
# Have the canvas create a window in the top left corner,
|
||||
# which is the frame with everything on it
|
||||
canvas.create_window(0, 0, anchor='nw', window=frame)
|
||||
frame.update_idletasks()
|
||||
canvas.config(scrollregion=canvas.bbox("all"))
|
||||
root.mainloop()
|
||||
else:
|
||||
tkMessageBox.showerror('ERROR!', 'Dictionary of Variables Empty: Nothing to do')
|
||||
pass
|
||||
sys.exit()
|
||||
|
|
@ -0,0 +1,7 @@
|
|||
# Descend into each ISCE component sub-package (order preserved from the
# original hand-written list), then stage this package's __init__.py next
# to them via the project-provided InstallSameDir helper.
foreach(pkg isceobj iscesys mroipac stdproc zerodop)
    add_subdirectory(${pkg})
endforeach()

InstallSameDir(__init__.py)
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
#!/usr/bin/env python3
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2010 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Giangi Sacco
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
# SCons build script for the ISCE "components" package.
#
# Clones the project environment, stages the package __init__.py into the
# install tree, and recurses into each component sub-package's SConscript.

Import('env')

package = 'components'
envcomponents = env.Clone()
envcomponents['PACKAGE'] = package
envcomponents['INSTALL_PATH'] = os.path.join(envcomponents['PRJ_SCONS_INSTALL'], package)
install = envcomponents['INSTALL_PATH']

# Create a minimal package __init__.py if one is not already present in the
# source tree.  A context manager guarantees the handle is closed even if
# the write fails (the original open/write/close leaked on error).
initFile = '__init__.py'
if not os.path.exists(initFile):
    with open(initFile, "w") as fout:
        fout.write("#!/usr/bin/env python3")

listFiles = [initFile]
envcomponents.Install(install, listFiles)
envcomponents.Alias('install', install)
Export('envcomponents')

# Recurse into the sub-package build scripts; order preserved from the
# original hand-written sequence.
for subdir in ('isceobj', 'mroipac', 'iscesys', 'stdproc', 'zerodop'):
    SConscript(subdir + '/SConscript')
|
||||
|
|
@ -0,0 +1 @@
|
|||
#!/usr/bin/env python3
|
||||
|
|
@ -0,0 +1,949 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import logging.config
|
||||
from iscesys.Component.Component import Component
|
||||
from iscesys.DateTimeUtil.DateTimeUtil import DateTimeUtil as DTU
|
||||
from iscesys.Compatibility import Compatibility
|
||||
|
||||
|
||||
# ----------------------------------------------------------------------------
# Module-level Component.Parameter declarations for Alos2Proc.
#
# Every declaration follows one of a few shapes, so small private factories
# keep each one to a short, easily audited call.  Public constant names,
# parameter names, public_names, defaults, types, mandatory flags, container
# settings and doc strings are unchanged from the originals, except for the
# typo fix "acquistion" -> "acquisition" in two doc strings.
# ----------------------------------------------------------------------------

def _param(name, public_name, doc, *, type=str, default=None, mandatory=False,
           container=None):
    """Build a Component.Parameter; 'container' is only passed when given."""
    kwargs = dict(public_name=public_name, default=default, type=type,
                  mandatory=mandatory, doc=doc)
    if container is not None:
        kwargs['container'] = container
    return Component.Parameter(name, **kwargs)


def _file_param(name, public_name, doc=None):
    """Optional string parameter naming a file; doc defaults to '<public_name> file'."""
    return _param(name, public_name,
                  doc if doc is not None else public_name + ' file')


def _list_param(name, public_name):
    """Mandatory list-of-float parameter whose doc repeats its public name."""
    return _param(name, public_name, public_name,
                  type=float, mandatory=True, container=list)


# Acquisition dates and observation-mode combination.
REFERENCE_DATE = _param('referenceDate', 'reference date',
                        'reference acquisition date', mandatory=True)
SECONDARY_DATE = _param('secondaryDate', 'secondary date',
                        'secondary acquisition date', mandatory=True)
MODE_COMBINATION = _param('modeCombination', 'mode combination',
                          'mode combination', type=int, mandatory=True)

# Frame / swath selection.
REFERENCE_FRAMES = _param('referenceFrames', 'reference frames',
                          'reference frames to process', container=list)
SECONDARY_FRAMES = _param('secondaryFrames', 'secondary frames',
                          'secondary frames to process', container=list)
STARTING_SWATH = _param('startingSwath', 'starting swath',
                        'starting swath to process', type=int, default=1)
ENDING_SWATH = _param('endingSwath', 'ending swath',
                      'ending swath to process', type=int, default=5)

# ScanSAR burst synchronization.
BURST_UNSYNCHRONIZED_TIME = _param(
    'burstUnsynchronizedTime', 'burst unsynchronized time',
    'burst unsynchronized time in second', type=float)
BURST_SYNCHRONIZATION = _param(
    'burstSynchronization', 'burst synchronization',
    'average burst synchronization of all swaths and frames in percentage',
    type=float)

# Swath mosaicking offsets (lists of floats, one entry per swath).
SWATH_RANGE_OFFSET_GEOMETRICAL_REFERENCE = _list_param(
    'swathRangeOffsetGeometricalReference',
    'swath range offset from geometry reference')
SWATH_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE = _list_param(
    'swathAzimuthOffsetGeometricalReference',
    'swath azimuth offset from geometry reference')
SWATH_RANGE_OFFSET_MATCHING_REFERENCE = _list_param(
    'swathRangeOffsetMatchingReference',
    'swath range offset from matching reference')
SWATH_AZIMUTH_OFFSET_MATCHING_REFERENCE = _list_param(
    'swathAzimuthOffsetMatchingReference',
    'swath azimuth offset from matching reference')
SWATH_RANGE_OFFSET_GEOMETRICAL_SECONDARY = _list_param(
    'swathRangeOffsetGeometricalSecondary',
    'swath range offset from geometry secondary')
SWATH_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY = _list_param(
    'swathAzimuthOffsetGeometricalSecondary',
    'swath azimuth offset from geometry secondary')
SWATH_RANGE_OFFSET_MATCHING_SECONDARY = _list_param(
    'swathRangeOffsetMatchingSecondary',
    'swath range offset from matching secondary')
SWATH_AZIMUTH_OFFSET_MATCHING_SECONDARY = _list_param(
    'swathAzimuthOffsetMatchingSecondary',
    'swath azimuth offset from matching secondary')

# Frame mosaicking offsets (lists of floats, one entry per frame).
FRAME_RANGE_OFFSET_GEOMETRICAL_REFERENCE = _list_param(
    'frameRangeOffsetGeometricalReference',
    'frame range offset from geometry reference')
FRAME_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE = _list_param(
    'frameAzimuthOffsetGeometricalReference',
    'frame azimuth offset from geometry reference')
FRAME_RANGE_OFFSET_MATCHING_REFERENCE = _list_param(
    'frameRangeOffsetMatchingReference',
    'frame range offset from matching reference')
FRAME_AZIMUTH_OFFSET_MATCHING_REFERENCE = _list_param(
    'frameAzimuthOffsetMatchingReference',
    'frame azimuth offset from matching reference')
FRAME_RANGE_OFFSET_GEOMETRICAL_SECONDARY = _list_param(
    'frameRangeOffsetGeometricalSecondary',
    'frame range offset from geometry secondary')
FRAME_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY = _list_param(
    'frameAzimuthOffsetGeometricalSecondary',
    'frame azimuth offset from geometry secondary')
FRAME_RANGE_OFFSET_MATCHING_SECONDARY = _list_param(
    'frameRangeOffsetMatchingSecondary',
    'frame range offset from matching secondary')
FRAME_AZIMUTH_OFFSET_MATCHING_SECONDARY = _list_param(
    'frameAzimuthOffsetMatchingSecondary',
    'frame azimuth offset from matching secondary')

# Multilooking factors for the various processing stages.
NUMBER_RANGE_LOOKS1 = _param(
    'numberRangeLooks1', 'number of range looks 1',
    "number of range looks when forming interferogram", type=int)
NUMBER_AZIMUTH_LOOKS1 = _param(
    'numberAzimuthLooks1', 'number of azimuth looks 1',
    "number of azimuth looks when forming interferogram", type=int)
NUMBER_RANGE_LOOKS2 = _param(
    'numberRangeLooks2', 'number of range looks 2',
    "number of range looks for further multiple looking", type=int)
NUMBER_AZIMUTH_LOOKS2 = _param(
    'numberAzimuthLooks2', 'number of azimuth looks 2',
    "number of azimuth looks for further multiple looking", type=int)
NUMBER_RANGE_LOOKS_SIM = _param(
    'numberRangeLooksSim', 'number of range looks sim',
    "number of range looks when simulating radar image", type=int)
NUMBER_AZIMUTH_LOOKS_SIM = _param(
    'numberAzimuthLooksSim', 'number of azimuth looks sim',
    "number of azimuth looks when simulating radar image", type=int)
NUMBER_RANGE_LOOKS_ION = _param(
    'numberRangeLooksIon', 'number of range looks ion',
    "number of range looks for ionospheric correction", type=int)
NUMBER_AZIMUTH_LOOKS_ION = _param(
    'numberAzimuthLooksIon', 'number of azimuth looks ion',
    "number of azimuth looks for ionospheric correction", type=int)

# Ionospheric correction sub-band wavelengths and DEM/radar transform.
SUBBAND_RADAR_WAVLENGTH = _param(
    'subbandRadarWavelength',
    'lower and upper radar wavelength for ionosphere correction',
    "lower and upper radar wavelength for ionosphere correction",
    type=float, container=list)
RADAR_DEM_AFFINE_TRANSFORM = _list_param(
    'radarDemAffineTransform', 'radar dem affine transform parameters')

# File names used/produced by the processing chain (all optional strings).
REFERENCE_SLC = _file_param('referenceSlc', 'reference slc')
SECONDARY_SLC = _file_param('secondarySlc', 'secondary slc')
REFERENCE_SWATH_OFFSET = _file_param('referenceSwathOffset',
                                     'reference swath offset')
SECONDARY_SWATH_OFFSET = _file_param('secondarySwathOffset',
                                     'secondary swath offset')
REFERENCE_FRAME_OFFSET = _file_param('referenceFrameOffset',
                                     'reference frame offset')
SECONDARY_FRAME_OFFSET = _file_param('secondaryFrameOffset',
                                     'secondary frame offset')
REFERENCE_FRAME_PARAMETER = _file_param('referenceFrameParameter',
                                        'reference frame parameter')
SECONDARY_FRAME_PARAMETER = _file_param('secondaryFrameParameter',
                                        'secondary frame parameter')
REFERENCE_TRACK_PARAMETER = _file_param('referenceTrackParameter',
                                        'reference track parameter')
SECONDARY_TRACK_PARAMETER = _file_param('secondaryTrackParameter',
                                        'secondary track parameter')
DEM = _file_param('dem', 'dem for coregistration')
DEM_GEO = _file_param('demGeo', 'dem for geocoding')
WBD = _file_param('wbd', 'water body')
WBD_OUT = _file_param('wbdOut', 'output water body')
INTERFEROGRAM = _file_param('interferogram', 'interferogram')
AMPLITUDE = _file_param('amplitude', 'amplitude')
DIFFERENTIAL_INTERFEROGRAM = _file_param('differentialInterferogram',
                                         'differential interferogram')
MULTILOOK_DIFFERENTIAL_INTERFEROGRAM = _file_param(
    'multilookDifferentialInterferogram',
    'multilook differential interferogram')
MULTILOOK_DIFFERENTIAL_INTERFEROGRAM_ORIGINAL = _file_param(
    'multilookDifferentialInterferogramOriginal',
    'original multilook differential interferogram')
MULTILOOK_AMPLITUDE = _file_param('multilookAmplitude', 'multilook amplitude')
MULTILOOK_COHERENCE = _file_param('multilookCoherence', 'multilook coherence')
MULTILOOK_PHSIG = _file_param('multilookPhsig', 'multilook phase sigma')
FILTERED_INTERFEROGRAM = _file_param('filteredInterferogram',
                                     'filtered interferogram')
UNWRAPPED_INTERFEROGRAM = _file_param('unwrappedInterferogram',
                                      'unwrapped interferogram')
UNWRAPPED_MASKED_INTERFEROGRAM = _file_param('unwrappedMaskedInterferogram',
                                             'unwrapped masked interferogram')
LATITUDE = _file_param('latitude', 'latitude')
LONGITUDE = _file_param('longitude', 'longitude')
HEIGHT = _file_param('height', 'height')
LOS = _file_param('los', 'los')
SIM = _file_param('sim', 'sim')
MSK = _file_param('msk', 'msk')
RANGE_OFFSET = _file_param('rangeOffset', 'range offset')
AZIMUTH_OFFSET = _file_param('azimuthOffset', 'azimuth offset')

MULTILOOK_LOS = _file_param('multilookLos', 'multilook los')
MULTILOOK_MSK = _file_param('multilookMsk', 'multilook msk')
MULTILOOK_WBD_OUT = _file_param('multilookWbdOut', 'multilook wbdOut',
                                'multilook output water body file')
MULTILOOK_LATITUDE = _file_param('multilookLatitude', 'multilook latitude')
MULTILOOK_LONGITUDE = _file_param('multilookLongitude', 'multilook longitude')
MULTILOOK_HEIGHT = _file_param('multilookHeight', 'multilook height')
MULTILOOK_ION = _file_param('multilookIon', 'multilook ionospheric phase')
RECT_RANGE_OFFSET = _file_param('rectRangeOffset', 'rectified range offset')
GEO_INTERFEROGRAM = _file_param('geoInterferogram', 'geocoded interferogram')
GEO_MASKED_INTERFEROGRAM = _file_param('geoMaskedInterferogram',
                                       'geocoded masked interferogram')
GEO_COHERENCE = _file_param('geoCoherence', 'geocoded coherence')
GEO_LOS = _file_param('geoLos', 'geocoded los')
GEO_ION = _file_param('geoIon', 'geocoded ionospheric phase')
###################################################################

#for dense offset
OFFSET_IMAGE_TOPOFFSET = _param(
    'offsetImageTopoffset', 'offset image top offset',
    "offset image top offset in samples", type=int)
OFFSET_IMAGE_LEFTOFFSET = _param(
    'offsetImageLeftoffset', 'offset image left offset',
    "offset image left offset in samples", type=int)
SECONDARY_SLC_COREGISTERED = _file_param('secondarySlcCoregistered',
                                         'coregistered secondary slc')
DENSE_OFFSET = _file_param('denseOffset', 'dense offset')
DENSE_OFFSET_SNR = _file_param('denseOffsetSnr', 'dense offset snr')
DENSE_OFFSET_COV = _file_param('denseOffsetCov', 'dense offset covariance')
DENSE_OFFSET_FILT = _file_param('denseOffsetFilt', 'filtered dense offset')
# NOTE(review): the three Geo* parameter names below start with a capital
# letter, unlike every other parameter; kept as-is because renaming would
# break existing callers and saved state.
GEO_DENSE_OFFSET = _file_param('GeoDenseOffset', 'geocoded dense offset')
GEO_DENSE_OFFSET_SNR = _file_param('GeoDenseOffsetSnr',
                                   'geocoded dense offset snr')
GEO_DENSE_OFFSET_FILT = _file_param('GeoDenseOffsetFilt',
                                    'geocoded dense offset with filtering',
                                    'geocoded dense offset with filtering')
###################################################################
|
||||
|
||||
class Alos2Proc(Component):
|
||||
"""
|
||||
This class holds the properties, along with methods (setters and getters)
|
||||
to modify and return their values.
|
||||
"""
|
||||
|
||||
parameter_list = (REFERENCE_DATE,
|
||||
SECONDARY_DATE,
|
||||
MODE_COMBINATION,
|
||||
REFERENCE_FRAMES,
|
||||
SECONDARY_FRAMES,
|
||||
STARTING_SWATH,
|
||||
ENDING_SWATH,
|
||||
BURST_UNSYNCHRONIZED_TIME,
|
||||
BURST_SYNCHRONIZATION,
|
||||
SWATH_RANGE_OFFSET_GEOMETRICAL_REFERENCE,
|
||||
SWATH_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE,
|
||||
SWATH_RANGE_OFFSET_MATCHING_REFERENCE,
|
||||
SWATH_AZIMUTH_OFFSET_MATCHING_REFERENCE,
|
||||
SWATH_RANGE_OFFSET_GEOMETRICAL_SECONDARY,
|
||||
SWATH_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY,
|
||||
SWATH_RANGE_OFFSET_MATCHING_SECONDARY,
|
||||
SWATH_AZIMUTH_OFFSET_MATCHING_SECONDARY,
|
||||
FRAME_RANGE_OFFSET_GEOMETRICAL_REFERENCE,
|
||||
FRAME_AZIMUTH_OFFSET_GEOMETRICAL_REFERENCE,
|
||||
FRAME_RANGE_OFFSET_MATCHING_REFERENCE,
|
||||
FRAME_AZIMUTH_OFFSET_MATCHING_REFERENCE,
|
||||
FRAME_RANGE_OFFSET_GEOMETRICAL_SECONDARY,
|
||||
FRAME_AZIMUTH_OFFSET_GEOMETRICAL_SECONDARY,
|
||||
FRAME_RANGE_OFFSET_MATCHING_SECONDARY,
|
||||
FRAME_AZIMUTH_OFFSET_MATCHING_SECONDARY,
|
||||
NUMBER_RANGE_LOOKS1,
|
||||
NUMBER_AZIMUTH_LOOKS1,
|
||||
NUMBER_RANGE_LOOKS2,
|
||||
NUMBER_AZIMUTH_LOOKS2,
|
||||
NUMBER_RANGE_LOOKS_SIM,
|
||||
NUMBER_AZIMUTH_LOOKS_SIM,
|
||||
NUMBER_RANGE_LOOKS_ION,
|
||||
NUMBER_AZIMUTH_LOOKS_ION,
|
||||
SUBBAND_RADAR_WAVLENGTH,
|
||||
RADAR_DEM_AFFINE_TRANSFORM,
|
||||
REFERENCE_SLC,
|
||||
SECONDARY_SLC,
|
||||
REFERENCE_SWATH_OFFSET,
|
||||
SECONDARY_SWATH_OFFSET,
|
||||
REFERENCE_FRAME_OFFSET,
|
||||
SECONDARY_FRAME_OFFSET,
|
||||
REFERENCE_FRAME_PARAMETER,
|
||||
SECONDARY_FRAME_PARAMETER,
|
||||
REFERENCE_TRACK_PARAMETER,
|
||||
SECONDARY_TRACK_PARAMETER,
|
||||
DEM,
|
||||
DEM_GEO,
|
||||
WBD,
|
||||
WBD_OUT,
|
||||
INTERFEROGRAM,
|
||||
AMPLITUDE,
|
||||
DIFFERENTIAL_INTERFEROGRAM,
|
||||
MULTILOOK_DIFFERENTIAL_INTERFEROGRAM,
|
||||
MULTILOOK_DIFFERENTIAL_INTERFEROGRAM_ORIGINAL,
|
||||
MULTILOOK_AMPLITUDE,
|
||||
MULTILOOK_COHERENCE,
|
||||
MULTILOOK_PHSIG,
|
||||
FILTERED_INTERFEROGRAM,
|
||||
UNWRAPPED_INTERFEROGRAM,
|
||||
UNWRAPPED_MASKED_INTERFEROGRAM,
|
||||
LATITUDE,
|
||||
LONGITUDE,
|
||||
HEIGHT,
|
||||
LOS,
|
||||
SIM,
|
||||
MSK,
|
||||
RANGE_OFFSET,
|
||||
AZIMUTH_OFFSET,
|
||||
MULTILOOK_LOS,
|
||||
MULTILOOK_MSK,
|
||||
MULTILOOK_WBD_OUT,
|
||||
MULTILOOK_LATITUDE,
|
||||
MULTILOOK_LONGITUDE,
|
||||
MULTILOOK_HEIGHT,
|
||||
MULTILOOK_ION,
|
||||
RECT_RANGE_OFFSET,
|
||||
GEO_INTERFEROGRAM,
|
||||
GEO_MASKED_INTERFEROGRAM,
|
||||
GEO_COHERENCE,
|
||||
GEO_LOS,
|
||||
GEO_ION,
|
||||
OFFSET_IMAGE_TOPOFFSET,
|
||||
OFFSET_IMAGE_LEFTOFFSET,
|
||||
SECONDARY_SLC_COREGISTERED,
|
||||
DENSE_OFFSET,
|
||||
DENSE_OFFSET_SNR,
|
||||
DENSE_OFFSET_COV,
|
||||
DENSE_OFFSET_FILT,
|
||||
GEO_DENSE_OFFSET,
|
||||
GEO_DENSE_OFFSET_SNR,
|
||||
GEO_DENSE_OFFSET_FILT)
|
||||
|
||||
facility_list = ()
|
||||
|
||||
|
||||
family='alos2context'
|
||||
|
||||
def __init__(self, name='', procDoc=None):
    """Initialize the ALOS-2 processing context component.

    name: optional component instance name.
    procDoc: optional catalog object used to record processing metadata.
    """
    super().__init__(family=self.__class__.family, name=name)
    self.procDoc = procDoc
||||
|
||||
def setFilename(self, referenceDate, secondaryDate, nrlks1, nalks1, nrlks2, nalks2):
    """Derive every intermediate and output file name from the scene dates and look counts.

    referenceDate, secondaryDate: date strings for the two acquisitions.
    nrlks1, nalks1: range/azimuth looks taken in the first multilook stage.
    nrlks2, nalks2: additional looks in the second stage; second-stage file
        names carry the total looks (nrlks1*nrlks2, nalks1*nalks2).
    """
    pair = f'{referenceDate}-{secondaryDate}'
    looks1 = f'_{nrlks1}rlks_{nalks1}alks'
    looks2 = f'_{nrlks1 * nrlks2}rlks_{nalks1 * nalks2}alks'

    # per-acquisition products
    self.referenceSlc = f'{referenceDate}.slc'
    self.secondarySlc = f'{secondaryDate}.slc'
    self.referenceSwathOffset = f'swath_offset_{referenceDate}.txt'
    self.secondarySwathOffset = f'swath_offset_{secondaryDate}.txt'
    self.referenceFrameOffset = f'frame_offset_{referenceDate}.txt'
    self.secondaryFrameOffset = f'frame_offset_{secondaryDate}.txt'
    self.referenceFrameParameter = f'{referenceDate}.frame.xml'
    self.secondaryFrameParameter = f'{secondaryDate}.frame.xml'
    self.referenceTrackParameter = f'{referenceDate}.track.xml'
    self.secondaryTrackParameter = f'{secondaryDate}.track.xml'

    # NOTE: dem, demGeo and wbd are set elsewhere, not here.

    # first-stage (looks1) interferometric products
    self.interferogram = f'{pair}{looks1}.int'
    self.amplitude = f'{pair}{looks1}.amp'
    self.differentialInterferogram = f'diff_{pair}{looks1}.int'

    # second-stage (total looks) products
    self.multilookDifferentialInterferogram = f'diff_{pair}{looks2}.int'
    self.multilookDifferentialInterferogramOriginal = f'diff_{pair}{looks2}_ori.int'
    self.multilookAmplitude = f'{pair}{looks2}.amp'
    self.multilookCoherence = f'{pair}{looks2}.cor'
    self.multilookPhsig = f'{pair}{looks2}.phsig'
    self.filteredInterferogram = f'filt_{pair}{looks2}.int'
    self.unwrappedInterferogram = f'filt_{pair}{looks2}.unw'
    self.unwrappedMaskedInterferogram = f'filt_{pair}{looks2}_msk.unw'

    # first-stage geometry layers
    self.latitude = f'{pair}{looks1}.lat'
    self.longitude = f'{pair}{looks1}.lon'
    self.height = f'{pair}{looks1}.hgt'
    self.los = f'{pair}{looks1}.los'
    self.sim = f'{pair}{looks1}.sim'
    self.msk = f'{pair}{looks1}.msk'
    self.wbdOut = f'{pair}{looks1}.wbd'
    self.rangeOffset = f'{pair}{looks1}_rg.off'
    self.azimuthOffset = f'{pair}{looks1}_az.off'

    # second-stage geometry layers
    self.multilookLos = f'{pair}{looks2}.los'
    self.multilookWbdOut = f'{pair}{looks2}.wbd'
    self.multilookMsk = f'{pair}{looks2}.msk'
    self.multilookLatitude = f'{pair}{looks2}.lat'
    self.multilookLongitude = f'{pair}{looks2}.lon'
    self.multilookHeight = f'{pair}{looks2}.hgt'
    self.multilookIon = f'{pair}{looks2}.ion'
    self.rectRangeOffset = f'{pair}{looks1}_rg_rect.off'

    # geocoded products
    self.geoInterferogram = f'filt_{pair}{looks2}.unw.geo'
    self.geoMaskedInterferogram = f'filt_{pair}{looks2}_msk.unw.geo'
    self.geoCoherence = f'{pair}{looks2}.cor.geo'
    self.geoLos = f'{pair}{looks2}.los.geo'

    # dense offset field
    self.secondarySlcCoregistered = f'{secondaryDate}_coreg.slc'
    self.denseOffset = f'{pair}_denseoffset.off'
    self.denseOffsetSnr = f'{pair}_denseoffset.snr'
    self.denseOffsetCov = f'{pair}_denseoffset.cov'
    self.denseOffsetFilt = f'filt_{pair}_denseoffset.off'
    self.GeoDenseOffset = f'{pair}_denseoffset.off.geo'
    self.GeoDenseOffsetSnr = f'{pair}_denseoffset.snr.geo'
    self.GeoDenseOffsetFilt = f'filt_{pair}_denseoffset.off.geo'
    self.geoIon = f'{pair}{looks2}.ion.geo'
|
||||
|
||||
|
||||
def loadProduct(self, xmlname):
    """Deserialize an ISCE product object from an XML file.

    xmlname: path of the product XML file.
    Returns the reconstructed product object.
    """
    from iscesys.Component.ProductManager import ProductManager

    manager = ProductManager()
    manager.configure()
    return manager.loadProduct(xmlname)
|
||||
|
||||
|
||||
def saveProduct(self, obj, xmlname):
    """Serialize an ISCE product object to an XML file.

    obj: the product object to save.
    xmlname: destination XML file path.
    """
    from iscesys.Component.ProductManager import ProductManager

    manager = ProductManager()
    manager.configure()
    manager.dumpProduct(obj, xmlname)
|
||||
|
||||
|
||||
def loadTrack(self, reference=True):
    """Load a track product plus all of its per-frame products.

    reference: True loads the reference track, False the secondary.
    Frame XML files are read from the per-frame directories named
    'f<i>_<frameNumber>' relative to the current working directory.
    Returns the track object with its `frames` list populated.
    """
    trackXml = self.referenceTrackParameter if reference else self.secondaryTrackParameter
    frameXml = self.referenceFrameParameter if reference else self.secondaryFrameParameter

    track = self.loadProduct(trackXml)
    track.frames = []
    for i, frameNumber in enumerate(self.referenceFrames):
        os.chdir('f{}_{}'.format(i + 1, frameNumber))
        track.frames.append(self.loadProduct(frameXml))
        os.chdir('../')
    return track
|
||||
|
||||
|
||||
def saveTrack(self, track, reference=True):
    """Save a track product plus all of its per-frame products to XML.

    track: track object whose `frames` list parallels self.referenceFrames.
    reference: True saves under the reference file names, False the secondary.
    """
    trackXml = self.referenceTrackParameter if reference else self.secondaryTrackParameter
    frameXml = self.referenceFrameParameter if reference else self.secondaryFrameParameter

    self.saveProduct(track, trackXml)
    for i, frameNumber in enumerate(self.referenceFrames):
        os.chdir('f{}_{}'.format(i + 1, frameNumber))
        self.saveProduct(track.frames[i], frameXml)
        os.chdir('../')
|
||||
|
||||
|
||||
def hasGPU(self):
    """Return True if the GPU (zerodop) modules can be imported, else False.

    Only ImportError is treated as "no GPU support"; the original used a
    bare `except:` which also hid unrelated failures (e.g. a broken
    extension raising something other than ImportError).
    """
    try:
        # Both GPU extension modules must be importable for GPU processing.
        from zerodop.GPUtopozero.GPUtopozero import PyTopozero
        from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr
    except ImportError:
        return False
    return True
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
|
|
@ -0,0 +1,39 @@
|
|||
# Install the ALOS-2 processor package sources: the per-step Python modules,
# the DEM/water-body tile lists, and the dense-offset notes, all into the
# same site-packages directory as the rest of this package.
InstallSameDir(
    __init__.py
    Alos2Proc.py
    Alos2ProcPublic.py
    Factories.py
    denseOffsetNote.txt
    runBaseline.py
    runCoherence.py
    runDenseOffset.py
    runDiffInterferogram.py
    runDownloadDem.py
    runFilt.py
    runFiltOffset.py
    runFormInterferogram.py
    runFrameMosaic.py
    runFrameOffset.py
    runGeo2Rdr.py
    runGeocode.py
    runGeocodeOffset.py
    runIonCorrect.py
    runIonFilt.py
    runIonSubband.py
    runIonUwrap.py
    runLook.py
    runPrepareSlc.py
    runPreprocessor.py
    runRdr2Geo.py
    runRdrDemOffset.py
    runRectRangeOffset.py
    runSlcMatch.py
    runSlcMosaic.py
    runSlcOffset.py
    runSwathMosaic.py
    runSwathOffset.py
    runUnwrapSnaphu.py
    srtm_no_swbd_tiles.txt
    srtm_tiles.txt
    swbd_tiles.txt
    )
|
||||
|
|
@ -0,0 +1,107 @@
|
|||
#
|
||||
# Author: Piyush Agram
|
||||
# Copyright 2016
|
||||
#
|
||||
|
||||
# Path to the _RunWrapper factories
_PATH = "isceobj.Alos2Proc."


## A factory to make _RunWrapper factories
def _factory(name, other_name=None):
    """Return a factory that produces _RunWrapper objects for a processing step.

    name: module name (relative to _PATH) the step function lives in.
    other_name: attribute to fetch from that module; defaults to `name`
        (step modules conventionally define a function named like the module).
    """
    other_name = other_name or name
    module = __import__(
        _PATH + name, fromlist=[""]
    )
    cls = getattr(module, other_name)

    def creator(other, *args, **kwargs):
        return _RunWrapper(other, cls)

    # Fill in the step name so help()/introspection shows which step this
    # wraps; the original left the '%s' placeholder unformatted.
    creator.__doc__ = "_RunWrapper for object calling %s" % other_name
    return creator
|
||||
|
||||
## Put in "_" to prevernt import on "from Factorties import *"
|
||||
class _RunWrapper(object):
|
||||
"""_RunWrapper(other, func)(*args, **kwargs)
|
||||
|
||||
executes:
|
||||
|
||||
func(other, *args, **kwargs)
|
||||
|
||||
(like a method)
|
||||
"""
|
||||
def __init__(self, other, func):
|
||||
self.method = func
|
||||
self.other = other
|
||||
return None
|
||||
|
||||
def __call__(self, *args, **kwargs):
|
||||
return self.method(self.other, *args, **kwargs)
|
||||
|
||||
pass
|
||||
|
||||
def createUnwrapper(other, do_unwrap = None, unwrapperName = None,
                    unwrap = None):
    """Return a _RunWrapper running the selected phase-unwrapping step.

    other: the object the wrapped step function is bound to.
    do_unwrap, unwrap: either flag enables unwrapping; if both are falsy,
        a no-op step is returned.
    unwrapperName: one of 'snaphu', 'snaphu_mcf', 'downsample_snaphu',
        'icu', 'grass' (case-insensitive).

    Raises ValueError for an unrecognized unwrapper name; the original fell
    through with `runUnwrap` unbound, producing a confusing UnboundLocalError.
    """
    if not do_unwrap and not unwrap:
        #if not defined create an empty method that does nothing
        def runUnwrap(self):
            return None
    elif unwrapperName.lower() == 'snaphu':
        from .runUnwrapSnaphu import runUnwrap
    elif unwrapperName.lower() == 'snaphu_mcf':
        from .runUnwrapSnaphu import runUnwrapMcf as runUnwrap
    elif unwrapperName.lower() == 'downsample_snaphu':
        from .run_downsample_unwrapper import runUnwrap
    elif unwrapperName.lower() == 'icu':
        from .runUnwrapIcu import runUnwrap
    elif unwrapperName.lower() == 'grass':
        from .runUnwrapGrass import runUnwrap
    else:
        raise ValueError('unrecognized unwrapper name: {}'.format(unwrapperName))
    return _RunWrapper(other, runUnwrap)
|
||||
|
||||
def createUnwrap2Stage(other, do_unwrap_2stage = None, unwrapperName = None):
    """Return a _RunWrapper for 2-stage unwrapping, or a no-op step when skipped.

    2-stage unwrapping is skipped when disabled or when the primary unwrapper
    is 'icu' or 'grass'. It requires the PuLP linear-programming package.
    """
    # Short-circuit keeps unwrapperName.lower() from running when the step
    # is disabled (unwrapperName may be None in that case).
    if (not do_unwrap_2stage) or unwrapperName.lower() in ('icu', 'grass'):
        # disabled: empty step that does nothing
        def runUnwrap2Stage(*arg, **kwargs):
            return None
    else:
        try:
            import pulp
            from .runUnwrap2Stage import runUnwrap2Stage
        except ImportError:
            raise Exception('Please install PuLP Linear Programming API to run 2stage unwrap')
    return _RunWrapper(other, runUnwrap2Stage)
|
||||
|
||||
|
||||
# Factories for the InSAR processing steps, in pipeline order.
createPreprocessor = _factory("runPreprocessor")
createBaseline = _factory("runBaseline")
createDownloadDem = _factory("runDownloadDem")
createPrepareSlc = _factory("runPrepareSlc")
createSlcOffset = _factory("runSlcOffset")
createFormInterferogram = _factory("runFormInterferogram")
createSwathOffset = _factory("runSwathOffset")
createSwathMosaic = _factory("runSwathMosaic")
createFrameOffset = _factory("runFrameOffset")
createFrameMosaic = _factory("runFrameMosaic")
createRdr2Geo = _factory("runRdr2Geo")
createGeo2Rdr = _factory("runGeo2Rdr")
createRdrDemOffset = _factory("runRdrDemOffset")
createRectRangeOffset = _factory("runRectRangeOffset")
createDiffInterferogram = _factory("runDiffInterferogram")
createLook = _factory("runLook")
createCoherence = _factory("runCoherence")
createIonSubband = _factory("runIonSubband")
createIonUwrap = _factory("runIonUwrap")
createIonFilt = _factory("runIonFilt")
createIonCorrect = _factory("runIonCorrect")
createFilt = _factory("runFilt")
createUnwrapSnaphu = _factory("runUnwrapSnaphu")
createGeocode = _factory("runGeocode")

# Factories for the dense-offset (pixel tracking) steps.
createSlcMosaic = _factory("runSlcMosaic")
createSlcMatch = _factory("runSlcMatch")
createDenseOffset = _factory("runDenseOffset")
createFiltOffset = _factory("runFiltOffset")
createGeocodeOffset = _factory("runGeocodeOffset")
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,45 @@
|
|||
#! /usr/bin/env python
|
||||
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
# Copyright 2012 California Institute of Technology. ALL RIGHTS RESERVED.
|
||||
#
|
||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||
# you may not use this file except in compliance with the License.
|
||||
# You may obtain a copy of the License at
|
||||
#
|
||||
# http://www.apache.org/licenses/LICENSE-2.0
|
||||
#
|
||||
# Unless required by applicable law or agreed to in writing, software
|
||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
# See the License for the specific language governing permissions and
|
||||
# limitations under the License.
|
||||
#
|
||||
# United States Government Sponsorship acknowledged. This software is subject to
|
||||
# U.S. export control laws and regulations and has been classified as 'EAR99 NLR'
|
||||
# (No [Export] License Required except when exporting to an embargoed country,
|
||||
# end user, or in support of a prohibited end use). By downloading this software,
|
||||
# the user agrees to comply with all applicable U.S. export laws and regulations.
|
||||
# The user has the responsibility to obtain export licenses, or other export
|
||||
# authority as may be required before exporting this software to any 'EAR99'
|
||||
# embargoed foreign country or citizen of those countries.
|
||||
#
|
||||
# Author: Eric Gurrola
|
||||
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
#!/usr/bin/env python
import os

# 'envisceobj' is exported by the parent SConscript; Import() is provided by SCons.
Import('envisceobj')
package = envisceobj['PACKAGE']
project = 'Alos2Proc'

# Install destination: <scons install prefix>/<package>/Alos2Proc
install = os.path.join(envisceobj['PRJ_SCONS_INSTALL'],package,project)

# Pure-Python step modules and data files shipped with the processor.
listFiles = ['__init__.py', 'Factories.py', 'Alos2Proc.py', 'Alos2ProcPublic.py', 'runPreprocessor.py', 'runBaseline.py', 'runDownloadDem.py', 'runPrepareSlc.py', 'runSlcOffset.py', 'runFormInterferogram.py', 'runSwathOffset.py', 'runSwathMosaic.py', 'runFrameOffset.py', 'runFrameMosaic.py', 'runRdr2Geo.py', 'runGeo2Rdr.py', 'runRdrDemOffset.py', 'runRectRangeOffset.py', 'runDiffInterferogram.py', 'runLook.py', 'runCoherence.py', 'runIonSubband.py', 'runIonUwrap.py', 'runIonFilt.py', 'runIonCorrect.py', 'runFilt.py', 'runUnwrapSnaphu.py', 'runGeocode.py', 'srtm_no_swbd_tiles.txt', 'srtm_tiles.txt', 'swbd_tiles.txt', 'runSlcMosaic.py', 'runSlcMatch.py', 'runDenseOffset.py', 'runFiltOffset.py', 'runGeocodeOffset.py', 'denseOffsetNote.txt']
envisceobj.Install(install,listFiles)
envisceobj.Alias('install',install)
|
||||
|
|
@ -0,0 +1,22 @@
|
|||
#
|
||||
# Author: Piyush Agram
|
||||
# Copyright 2016
|
||||
#
|
||||
|
||||
from .Alos2Proc import *
|
||||
from .Factories import *
|
||||
|
||||
def getFactoriesInfo():
    """Describe the factories exported by this package.

    Returns a dict mapping the product name ('Alos2Proc') to the name of its
    factory function and the keyword arguments that factory accepts.
    """
    args = {
        'procDoc': {'value': None, 'type': 'Catalog', 'optional': True},
    }
    return {
        'Alos2Proc': {
            'args': args,
            'factory': 'createAlos2Proc',
        },
    }
|
||||
|
||||
def createAlos2Proc(name=None, procDoc= None):
    """Instantiate an Alos2Proc product, optionally attaching a process catalog."""
    from .Alos2Proc import Alos2Proc
    instance = Alos2Proc(name=name, procDoc=procDoc)
    return instance
|
||||
|
|
@ -0,0 +1,31 @@
|
|||
only the following parameters might be changed in the denseoffset steps:
|
||||
=======================================================================
|
||||
if self.frameOffsetMatching == False:
|
||||
self._insar.frameRangeOffsetMatchingReference = offsetReference[2]
|
||||
self._insar.frameAzimuthOffsetMatchingReference = offsetReference[3]
|
||||
self._insar.frameRangeOffsetMatchingSecondary = offsetSecondary[2]
|
||||
self._insar.frameAzimuthOffsetMatchingSecondary = offsetSecondary[3]
|
||||
|
||||
|
||||
Therefore these denseoffset steps could be moved to after 'frame_mosaic' step
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,229 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import logging
|
||||
import datetime
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
import isceobj.Sensor.MultiMode as MultiMode
|
||||
from isceobj.Planet.Planet import Planet
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runBaseline')
|
||||
|
||||
def runBaseline(self):
    '''compute baseline

    Steps performed:
      2. burst synchronization between reference and secondary
         (only for modeCombination 21 and 31 -- presumably the ScanSAR
         mode pairings; TODO confirm the code meanings against Alos2Proc),
      3. parallel/perpendicular baselines at the four corners and center
         of the reference track,
      4. geographic bounding boxes of both tracks.
    Results are recorded in the process catalog and on self._insar.
    '''
    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)


    ##################################################
    #2. compute burst synchronization
    ##################################################
    #burst synchronization may slowly change along a track as a result of the changing relative speed of the two flights
    #in one frame, real unsynchronized time is the same for all swaths
    unsynTime = 0
    #real synchronized time/percentage depends on the swath burst length (synTime = burstlength - abs(unsynTime))
    #synTime = 0
    synPercentage = 0

    numberOfFrames = len(self._insar.referenceFrames)
    numberOfSwaths = self._insar.endingSwath - self._insar.startingSwath + 1

    for i, frameNumber in enumerate(self._insar.referenceFrames):
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            referenceSwath = referenceTrack.frames[i].swaths[j]
            secondarySwath = secondaryTrack.frames[i].swaths[j]
            #using Piyush's code for computing range and azimuth offsets
            midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5
            midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf)
            llh = referenceTrack.orbit.rdr2geo(midSensingStart, midRange)
            slvaz, slvrng = secondaryTrack.orbit.geo2rdr(llh)
            ###Translate to offsets
            #note that secondary range pixel size and prf might be different from reference, here we assume there is a virtual secondary with same
            #range pixel size and prf
            #NOTE(review): rgoff is computed but never used below; only azoff feeds the burst sync.
            rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5
            azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5

            #compute burst synchronization
            #burst parameters for ScanSAR wide mode not estimed yet
            if self._insar.modeCombination == 21:
                scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff
                #secondary burst start times corresponding to reference burst start times (100% synchronization)
                scburstStartLines = np.arange(scburstStartLine - 100000*referenceSwath.burstCycleLength, \
                                              scburstStartLine + 100000*referenceSwath.burstCycleLength, \
                                              referenceSwath.burstCycleLength)
                dscburstStartLines = -((secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf - scburstStartLines)
                #find the difference with minimum absolute value
                unsynLines = dscburstStartLines[np.argmin(np.absolute(dscburstStartLines))]
                #no overlap at all: clamp unsynLines to one burst length (sign preserved)
                if np.absolute(unsynLines) >= secondarySwath.burstLength:
                    synLines = 0
                    if unsynLines > 0:
                        unsynLines = secondarySwath.burstLength
                    else:
                        unsynLines = -secondarySwath.burstLength
                else:
                    synLines = secondarySwath.burstLength - np.absolute(unsynLines)

                unsynTime += unsynLines / referenceSwath.prf
                synPercentage += synLines / referenceSwath.burstLength * 100.0

                catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(synLines / referenceSwath.burstLength * 100.0), 'runBaseline')

                ############################################################################################
                #sign convention of the number of unsynchronized lines (unsynLines) follows ampcor offsets:
                #    secondaryLineNumber = referenceLineNumber + unsynLines
                #i.e. unsynLines < 0 means the secondary burst starts earlier (higher up) than the
                #reference burst. (The original ASCII illustration of the two bursts is in repo history.)
                ############################################################################################

            ##burst parameters for ScanSAR wide mode not estimed yet
            elif self._insar.modeCombination == 31:
                #scansar is reference
                scburstStartLine = (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf + azoff
                #secondary burst start times corresponding to reference burst start times (100% synchronization)
                #copy the reference burst timing onto the secondary at the first valid line
                for k in range(-100000, 100000):
                    saz_burstx = scburstStartLine + referenceSwath.burstCycleLength * k
                    st_burstx = secondarySwath.sensingStart + datetime.timedelta(seconds=saz_burstx / referenceSwath.prf)
                    if saz_burstx >= 0.0 and saz_burstx <= secondarySwath.numberOfLines -1:
                        secondarySwath.burstStartTime = st_burstx
                        secondarySwath.burstLength = referenceSwath.burstLength
                        secondarySwath.burstCycleLength = referenceSwath.burstCycleLength
                        secondarySwath.swathNumber = referenceSwath.swathNumber
                        break
                #unsynLines = 0
                #synLines = referenceSwath.burstLength
                #unsynTime += unsynLines / referenceSwath.prf
                #synPercentage += synLines / referenceSwath.burstLength * 100.0
                catalog.addItem('burst synchronization of frame {} swath {}'.format(frameNumber, swathNumber), '%.1f%%'%(100.0), 'runBaseline')
            else:
                #other mode combinations: no burst synchronization to compute
                pass

        #overwrite original frame parameter file
        #(only mode 31 modified the secondary swath burst parameters above)
        if self._insar.modeCombination == 31:
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            self._insar.saveProduct(secondaryTrack.frames[i], os.path.join(frameDir, self._insar.secondaryFrameParameter))

    #getting average
    if self._insar.modeCombination == 21:
        unsynTime /= numberOfFrames*numberOfSwaths
        synPercentage /= numberOfFrames*numberOfSwaths
    elif self._insar.modeCombination == 31:
        unsynTime = 0.
        synPercentage = 100.
    else:
        pass

    #record results
    if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 31):
        self._insar.burstUnsynchronizedTime = unsynTime
        self._insar.burstSynchronization = synPercentage
        catalog.addItem('burst synchronization averaged', '%.1f%%'%(synPercentage), 'runBaseline')


    ##################################################
    #3. compute baseline
    ##################################################
    #only compute baseline at four corners and center of the reference track
    bboxRdr = getBboxRdr(referenceTrack)

    rangeMin = bboxRdr[0]
    rangeMax = bboxRdr[1]
    azimuthTimeMin = bboxRdr[2]
    azimuthTimeMax = bboxRdr[3]

    azimuthTimeMid = azimuthTimeMin+datetime.timedelta(seconds=(azimuthTimeMax-azimuthTimeMin).total_seconds()/2.0)
    rangeMid = (rangeMin + rangeMax) / 2.0

    #corner points then center, in [azimuth time, slant range] pairs
    points = [[azimuthTimeMin, rangeMin],
              [azimuthTimeMin, rangeMax],
              [azimuthTimeMax, rangeMin],
              [azimuthTimeMax, rangeMax],
              [azimuthTimeMid, rangeMid]]

    Bpar = []
    Bperp = []
    #modify Piyush's code for computing baslines
    refElp = Planet(pname='Earth').ellipsoid
    for x in points:
        referenceSV = referenceTrack.orbit.interpolate(x[0], method='hermite')
        target = referenceTrack.orbit.rdr2geo(x[0], x[1])

        slvTime, slvrng = secondaryTrack.orbit.geo2rdr(target)
        secondarySV = secondaryTrack.orbit.interpolateOrbit(slvTime, method='hermite')

        targxyz = np.array(refElp.LLH(target[0], target[1], target[2]).ecef().tolist())
        mxyz = np.array(referenceSV.getPosition())
        mvel = np.array(referenceSV.getVelocity())
        sxyz = np.array(secondarySV.getPosition())

        #to fix abrupt change near zero in baseline grid. JUN-05-2020
        #project the secondary position onto the plane perpendicular to the reference velocity
        mvelunit = mvel / np.linalg.norm(mvel)
        sxyz = sxyz - np.dot ( sxyz-mxyz, mvelunit) * mvelunit

        #law of cosines in the (reference, secondary, target) triangle
        aa = np.linalg.norm(sxyz-mxyz)
        costheta = (x[1]*x[1] + aa*aa - slvrng*slvrng)/(2.*x[1]*aa)

        Bpar.append(aa*costheta)

        perp = aa * np.sqrt(1 - costheta*costheta)
        #sign of the perpendicular baseline from the cross-product direction
        direction = np.sign(np.dot( np.cross(targxyz-mxyz, sxyz-mxyz), mvel))
        Bperp.append(direction*perp)

    catalog.addItem('parallel baseline at upperleft of reference track', Bpar[0], 'runBaseline')
    catalog.addItem('parallel baseline at upperright of reference track', Bpar[1], 'runBaseline')
    catalog.addItem('parallel baseline at lowerleft of reference track', Bpar[2], 'runBaseline')
    catalog.addItem('parallel baseline at lowerright of reference track', Bpar[3], 'runBaseline')
    catalog.addItem('parallel baseline at center of reference track', Bpar[4], 'runBaseline')

    catalog.addItem('perpendicular baseline at upperleft of reference track', Bperp[0], 'runBaseline')
    catalog.addItem('perpendicular baseline at upperright of reference track', Bperp[1], 'runBaseline')
    catalog.addItem('perpendicular baseline at lowerleft of reference track', Bperp[2], 'runBaseline')
    catalog.addItem('perpendicular baseline at lowerright of reference track', Bperp[3], 'runBaseline')
    catalog.addItem('perpendicular baseline at center of reference track', Bperp[4], 'runBaseline')


    ##################################################
    #4. compute bounding box
    ##################################################
    referenceBbox = getBboxGeo(referenceTrack)
    secondaryBbox = getBboxGeo(secondaryTrack)

    catalog.addItem('reference bounding box', referenceBbox, 'runBaseline')
    catalog.addItem('secondary bounding box', secondaryBbox, 'runBaseline')


    catalog.printToLog(logger, "runBaseline")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,134 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runCoherence')
|
||||
|
||||
def runCoherence(self):
    '''estimate coherence
    '''
    # skip entirely when InSAR processing is disabled
    if hasattr(self, 'doInSAR') and not self.doInSAR:
        return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    totalRangeLooks = self._insar.numberRangeLooks1 * self._insar.numberRangeLooks2
    totalAzimuthLooks = self._insar.numberAzimuthLooks1 * self._insar.numberAzimuthLooks2

    #here we choose not to scale interferogram and amplitude
    #scaleAmplitudeInterferogram

    # With enough total looks (>= 9), estimate coherence per-pixel from the
    # multilooked interferogram and amplitude; otherwise fall back to a
    # moving-window estimator.
    if totalRangeLooks * totalAzimuthLooks >= 9:
        cmd = "imageMath.py -e='sqrt(b_0*b_1);abs(a)/(b_0+(b_0==0))/(b_1+(b_1==0))*(b_0!=0)*(b_1!=0)' --a={} --b={} -o {} -t float -s BIL".format(
            self._insar.multilookDifferentialInterferogram,
            self._insar.multilookAmplitude,
            self._insar.multilookCoherence)
        runCmd(cmd)
    else:
        coherence(self._insar.multilookAmplitude, self._insar.multilookDifferentialInterferogram, self._insar.multilookCoherence,
                  method="cchz_wave", windowSize=5)
    os.chdir('../')

    catalog.printToLog(logger, "runCoherence")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
from isceobj.Util.decorators import use_api
@use_api
def coherence(amplitudeFile, interferogramFile, coherenceFile, method="cchz_wave", windowSize=5):
    '''
    compute coherence using a window

    amplitudeFile: 2-channel amplitude image (with .xml metadata alongside).
    interferogramFile: complex interferogram image (with .xml metadata).
    coherenceFile: output coherence file name.
    method: 'cchz_wave' or 'phase_gradient'.
    windowSize: estimation window size in pixels.
    '''
    # 'sys' was referenced below but never imported anywhere in this file,
    # so the unrecognized-method path raised NameError instead of exiting.
    import sys
    import operator
    from mroipac.correlation.correlation import Correlation

    CORRELATION_METHOD = {
        'phase_gradient' : operator.methodcaller('calculateEffectiveCorrelation'),
        'cchz_wave' : operator.methodcaller('calculateCorrelation')
    }

    ampImage = isceobj.createAmpImage()
    ampImage.load(amplitudeFile + '.xml')
    ampImage.setAccessMode('read')
    ampImage.createImage()

    intImage = isceobj.createIntImage()
    intImage.load(interferogramFile + '.xml')
    intImage.setAccessMode('read')
    intImage.createImage()

    #there is no coherence image in the isceobj/Image
    cohImage = isceobj.createOffsetImage()
    cohImage.setFilename(coherenceFile)
    cohImage.setWidth(ampImage.width)
    cohImage.setAccessMode('write')
    cohImage.createImage()

    cor = Correlation()
    cor.configure()
    cor.wireInputPort(name='amplitude', object=ampImage)
    cor.wireInputPort(name='interferogram', object=intImage)
    cor.wireOutputPort(name='correlation', object=cohImage)

    cor.windowSize = windowSize

    # NOTE(review): the images are finalized before the correlation method is
    # invoked -- confirm this ordering matches the Correlation API's expectations.
    cohImage.finalizeImage()
    intImage.finalizeImage()
    ampImage.finalizeImage()

    try:
        CORRELATION_METHOD[method](cor)
    except KeyError:
        print("Unrecognized correlation method")
        sys.exit(1)
        pass
    return None
|
||||
|
||||
|
||||
def scaleAmplitudeInterferogram(amplitudeFile, interferogramFile, ratio=100000.0):
    '''
    scale amplitude and interferogram, and balance the two channels of amplitude image
    according to equation (2) in
    Howard A. Zebker and Katherine Chen, Accurate Estimation of Correlation in InSAR Observations
    IEEE GEOSCIENCE AND REMOTE SENSING LETTERS, VOL. 2, NO. 2, APRIL 2005.
    the operation of the program does not affect coherence estimation

    Both input files are rewritten in place.
    '''
    ampObj = isceobj.createImage()
    ampObj.load(amplitudeFile+'.xml')
    width = ampObj.width
    length = ampObj.length

    inf = np.fromfile(interferogramFile, dtype=np.complex64).reshape(length, width)
    amp = np.fromfile(amplitudeFile, dtype=np.complex64).reshape(length, width)

    # only pixels valid in all three channels contribute to the channel balance
    flag = (inf!=0)*(amp.real!=0)*(amp.imag!=0)
    nvalid = np.sum(flag, dtype=np.float64)

    # RMS power of each amplitude channel over the valid pixels
    mpwr1 = np.sqrt(np.sum(amp.real * amp.real * flag, dtype=np.float64) / nvalid)
    mpwr2 = np.sqrt(np.sum(amp.imag * amp.imag * flag, dtype=np.float64) / nvalid)

    amp.real = amp.real / ratio
    amp.imag = amp.imag / ratio * mpwr1 / mpwr2
    inf = inf / ratio / ratio * mpwr1 / mpwr2

    # Write back to the function's own file arguments. The original wrote to
    # the undefined names 'inps.amp'/'inps.inf' (a leftover from a standalone
    # script), which raised NameError at runtime.
    amp.astype(np.complex64).tofile(amplitudeFile)
    inf.astype(np.complex64).tofile(interferogramFile)
|
||||
|
|
@ -0,0 +1,364 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Util.decorators import use_api
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runDenseOffset')
|
||||
|
||||
def runDenseOffset(self):
    '''estimate offset field

    Driver step: run CPU or GPU dense ampcor between the reference SLC and the
    coregistered secondary SLC, then post-process the resulting offset/SNR
    rasters (null-value cleanup and optional water-body masking).
    Results are written under the 'dense_offset' directory.
    '''
    # skip entirely when dense offset estimation is disabled
    if not self.doDenseOffset:
        return
    # only mode combinations 0 and 1 are supported here
    # (semantics of modeCombination defined elsewhere in the processor — not visible in this module)
    if not ((self._insar.modeCombination == 0) or (self._insar.modeCombination == 1)):
        return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    # all products of this step live in their own subdirectory
    denseOffsetDir = 'dense_offset'
    os.makedirs(denseOffsetDir, exist_ok=True)
    os.chdir(denseOffsetDir)

    #referenceTrack = self._insar.loadProduct(self._insar.referenceTrackParameter)
    #secondaryTrack = self._insar.loadProduct(self._insar.secondaryTrackParameter)

    #########################################################################################

    # dispatch to GPU or CPU implementation; both write the same set of output files
    if self.useGPU and self._insar.hasGPU():
        runDenseOffsetGPU(self)
        #define null value. Lijun said there is actually no such null value in GPU ampcor.
        nullValue = -10000.0
    else:
        runDenseOffsetCPU(self)
        #define null value
        nullValue = -10000.0

    #null value set to zero
    # the offset file is 2-band BIL (band 1: range, band 2: azimuth), stored as
    # interleaved rows, hence the (length*2, width) shape and the ::2 row slices
    img = isceobj.createImage()
    img.load(self._insar.denseOffset+'.xml')
    width = img.width
    length = img.length
    # mode='r+' memmaps modify the files on disk in place
    offset=np.memmap(self._insar.denseOffset, dtype='float32', mode='r+', shape=(length*2, width))
    snr=np.memmap(self._insar.denseOffsetSnr, dtype='float32', mode='r+', shape=(length, width))
    offsetband1 = offset[0:length*2:2, :]
    offsetband2 = offset[1:length*2:2, :]
    # a pixel is invalid if either band holds the null value; zero both bands and SNR there
    index = np.nonzero(np.logical_or(offsetband1==nullValue, offsetband2==nullValue))
    offsetband1[index] = 0
    offsetband2[index] = 0
    snr[index] = 0
    # deleting the memmaps flushes the in-place edits to disk
    del offset, offsetband1, offsetband2, snr

    #areas covered by water body set to zero
    if self.maskOffsetWithWbd:
        # full-resolution water body mask dimensions
        img = isceobj.createImage()
        img.load('wbd.rdr.xml')
        width0 = img.width
        length0 = img.length

        # offset-grid dimensions
        img = isceobj.createImage()
        img.load(self._insar.denseOffset+'.xml')
        width = img.width
        length = img.length

        #get water body mask
        # subsample wbd.rdr onto the offset grid using the window start offsets
        # and skip factors recorded by the ampcor run
        wbd0=np.memmap('wbd.rdr', dtype=np.int8, mode='r', shape=(length0, width0))
        wbd0=wbd0[0+self._insar.offsetImageTopoffset:length0:self.offsetSkipHeight,
                  0+self._insar.offsetImageLeftoffset:width0:self.offsetSkipWidth]
        # pad with a margin so the subsampled mask is at least as large as the offset grid
        wbd = np.zeros((length+100, width+100), dtype=np.int8)
        wbd[0:wbd0.shape[0], 0:wbd0.shape[1]]=wbd0

        #mask offset and snr
        # water pixels carry the value -1 in the water body file (0 = land, -2 = no data)
        offset=np.memmap(self._insar.denseOffset, dtype='float32', mode='r+', shape=(length*2, width))
        snr=np.memmap(self._insar.denseOffsetSnr, dtype='float32', mode='r+', shape=(length, width))
        (offset[0:length*2:2, :])[np.nonzero(wbd[0:length, 0:width]==-1)]=0
        (offset[1:length*2:2, :])[np.nonzero(wbd[0:length, 0:width]==-1)]=0
        snr[np.nonzero(wbd[0:length, 0:width]==-1)]=0

        del wbd0, wbd, offset, snr


    #########################################################################################

    os.chdir('../')
    catalog.printToLog(logger, "runDenseOffset")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
#@use_api
def runDenseOffsetCPU(self):
    '''
    Estimate dense offset field between a pair of SLCs.

    CPU implementation using mroipac DenseAmpcor. Writes the two-band (range,
    azimuth) offset file, the SNR file, and the covariance file named by
    self._insar, and records the first-window location for later masking.

    Returns:
        (offsetCols, offsetLines): dimensions of the resulting offset grid.
    '''
    from mroipac.ampcor.DenseAmpcor import DenseAmpcor
    from isceobj.Alos2Proc.Alos2ProcPublic import runCmd

    ####For this module currently, we need to create an actual file on disk
    # DenseAmpcor cannot read through the VRT, so materialize any missing binaries
    for infile in [self._insar.referenceSlc, self._insar.secondarySlcCoregistered]:
        if os.path.isfile(infile):
            continue
        cmd = 'gdal_translate -of ENVI {0}.vrt {0}'.format(infile)
        runCmd(cmd)

    m = isceobj.createSlcImage()
    m.load(self._insar.referenceSlc + '.xml')
    m.setAccessMode('READ')

    s = isceobj.createSlcImage()
    s.load(self._insar.secondarySlcCoregistered + '.xml')
    s.setAccessMode('READ')

    #objOffset.numberThreads = 1
    print('\n************* dense offset estimation parameters *************')
    print('reference SLC: %s' % (self._insar.referenceSlc))
    print('secondary SLC: %s' % (self._insar.secondarySlcCoregistered))
    print('dense offset estimation window width: %d' % (self.offsetWindowWidth))
    print('dense offset estimation window hight: %d' % (self.offsetWindowHeight))
    print('dense offset search window width: %d' % (self.offsetSearchWindowWidth))
    print('dense offset search window hight: %d' % (self.offsetSearchWindowHeight))
    print('dense offset skip width: %d' % (self.offsetSkipWidth))
    print('dense offset skip hight: %d' % (self.offsetSkipHeight))
    print('dense offset covariance surface oversample factor: %d' % (self.offsetCovarianceOversamplingFactor))
    print('dense offset covariance surface oversample window size: %d\n' % (self.offsetCovarianceOversamplingWindowsize))


    objOffset = DenseAmpcor(name='dense')
    objOffset.configure()

    # pick complex vs. real matching based on each image's data type code
    if m.dataType.startswith('C'):
        objOffset.setImageDataType1('complex')
    else:
        objOffset.setImageDataType1('real')
    if s.dataType.startswith('C'):
        objOffset.setImageDataType2('complex')
    else:
        objOffset.setImageDataType2('real')

    objOffset.offsetImageName = self._insar.denseOffset
    objOffset.snrImageName = self._insar.denseOffsetSnr
    objOffset.covImageName = self._insar.denseOffsetCov

    objOffset.setWindowSizeWidth(self.offsetWindowWidth)
    objOffset.setWindowSizeHeight(self.offsetWindowHeight)
    #NOTE: actual number of resulting correlation pixels: self.offsetSearchWindowWidth*2+1
    objOffset.setSearchWindowSizeWidth(self.offsetSearchWindowWidth)
    objOffset.setSearchWindowSizeHeight(self.offsetSearchWindowHeight)
    objOffset.setSkipSampleAcross(self.offsetSkipWidth)
    objOffset.setSkipSampleDown(self.offsetSkipHeight)
    objOffset.setOversamplingFactor(self.offsetCovarianceOversamplingFactor)
    objOffset.setZoomWindowSize(self.offsetCovarianceOversamplingWindowsize)
    # the secondary SLC is already coregistered, so the expected gross offset is zero
    objOffset.setAcrossGrossOffset(0)
    objOffset.setDownGrossOffset(0)
    #these are azimuth scaling factor
    #Matching Scale for Sample/Line Directions (-) = 1.000000551500 1.000002373200
    # PRFs set to 1.0 so no azimuth rescaling is applied between the two images
    objOffset.setFirstPRF(1.0)
    objOffset.setSecondPRF(1.0)

    objOffset.denseampcor(m, s)

    ### Store params for later
    # location (line/sample in the full SLC) of the first matching window,
    # needed later when subsampling the water body mask onto the offset grid
    self._insar.offsetImageTopoffset = objOffset.locationDown[0][0]
    self._insar.offsetImageLeftoffset = objOffset.locationAcross[0][0]

    #change band order
    # DenseAmpcor writes (azimuth, range) band order; swap interleaved BIL rows
    # so band 1 is range and band 2 is azimuth, matching the GPU path's output
    width=objOffset.offsetCols
    length=objOffset.offsetLines

    offset1 = np.fromfile(self._insar.denseOffset, dtype=np.float32).reshape(length*2, width)
    offset2 = np.zeros((length*2, width), dtype=np.float32)
    offset2[0:length*2:2, :] = offset1[1:length*2:2, :]
    offset2[1:length*2:2, :] = offset1[0:length*2:2, :]

    # discard the original file and its metadata before rewriting
    os.remove(self._insar.denseOffset)
    os.remove(self._insar.denseOffset+'.vrt')
    os.remove(self._insar.denseOffset+'.xml')

    offset2.astype(np.float32).tofile(self._insar.denseOffset)
    outImg = isceobj.createImage()
    outImg.setDataType('FLOAT')
    outImg.setFilename(self._insar.denseOffset)
    outImg.setBands(2)
    outImg.scheme = 'BIL'
    outImg.setWidth(width)
    outImg.setLength(length)
    outImg.addDescription('two-band pixel offset file. 1st band: range offset, 2nd band: azimuth offset')
    outImg.setAccessMode('read')
    outImg.renderHdr()

    return (objOffset.offsetCols, objOffset.offsetLines)
|
||||
|
||||
|
||||
def runDenseOffsetGPU(self):
    '''
    Estimate dense offset field between a pair of SLCs.

    GPU implementation using PyCuAmpcor. Writes the two-band (range, azimuth)
    offset file, the SNR file, and the 3-band covariance file named by
    self._insar, and records the first-window center location for later masking.

    Returns:
        (numberWindowAcross, numberWindowDown): dimensions of the offset grid.
    '''
    from contrib.PyCuAmpcor import PyCuAmpcor
    from isceobj.Alos2Proc.Alos2ProcPublic import runCmd

    ############################################################################################
    # #different from minyan's script: cuDenseOffsets.py: deramp method (0: mag, 1: complex)
    # objOffset.derampMethod = 2 #
    # #varying-gross-offset parameters not set

    # #not set in minyan's script: cuDenseOffsets.py
    # objOffset.corrSurfaceZoomInWindow
    # objOffset.grossOffsetAcrossStatic = 0
    # objOffset.grossOffsetDownStatic = 0
    ############################################################################################


    ####For this module currently, we need to create an actual file on disk
    # PyCuAmpcor cannot read through the VRT, so materialize any missing binaries
    for infile in [self._insar.referenceSlc, self._insar.secondarySlcCoregistered]:
        if os.path.isfile(infile):
            continue
        cmd = 'gdal_translate -of ENVI {0}.vrt {0}'.format(infile)
        runCmd(cmd)

    m = isceobj.createSlcImage()
    m.load(self._insar.referenceSlc + '.xml')
    m.setAccessMode('READ')

    s = isceobj.createSlcImage()
    s.load(self._insar.secondarySlcCoregistered + '.xml')
    s.setAccessMode('READ')

    print('\n************* dense offset estimation parameters *************')
    print('reference SLC: %s' % (self._insar.referenceSlc))
    print('secondary SLC: %s' % (self._insar.secondarySlcCoregistered))
    print('dense offset estimation window width: %d' % (self.offsetWindowWidth))
    print('dense offset estimation window hight: %d' % (self.offsetWindowHeight))
    print('dense offset search window width: %d' % (self.offsetSearchWindowWidth))
    print('dense offset search window hight: %d' % (self.offsetSearchWindowHeight))
    print('dense offset skip width: %d' % (self.offsetSkipWidth))
    print('dense offset skip hight: %d' % (self.offsetSkipHeight))
    print('dense offset covariance surface oversample factor: %d' % (self.offsetCovarianceOversamplingFactor))


    objOffset = PyCuAmpcor.PyCuAmpcor()
    objOffset.algorithm = 0
    objOffset.derampMethod = 1 # 1=linear phase ramp, 0=take mag, 2=skip
    objOffset.referenceImageName = self._insar.referenceSlc
    objOffset.referenceImageHeight = m.length
    objOffset.referenceImageWidth = m.width
    objOffset.secondaryImageName = self._insar.secondarySlcCoregistered
    objOffset.secondaryImageHeight = s.length
    objOffset.secondaryImageWidth = s.width
    objOffset.offsetImageName = self._insar.denseOffset
    objOffset.grossOffsetImageName = self._insar.denseOffset + ".gross"
    objOffset.snrImageName = self._insar.denseOffsetSnr
    objOffset.covImageName = self._insar.denseOffsetCov

    objOffset.windowSizeWidth = self.offsetWindowWidth
    objOffset.windowSizeHeight = self.offsetWindowHeight

    objOffset.halfSearchRangeAcross = self.offsetSearchWindowWidth
    objOffset.halfSearchRangeDown = self.offsetSearchWindowHeight

    objOffset.skipSampleDown = self.offsetSkipHeight
    objOffset.skipSampleAcross = self.offsetSkipWidth

    #Oversampling method for correlation surface(0=fft,1=sinc)
    objOffset.corrSurfaceOverSamplingMethod = 0
    objOffset.corrSurfaceOverSamplingFactor = self.offsetCovarianceOversamplingFactor

    # set gross offset
    # the secondary SLC is already coregistered, so the expected gross offset is zero
    objOffset.grossOffsetAcrossStatic = 0
    objOffset.grossOffsetDownStatic = 0
    # set the margin
    margin = 0

    # adjust the margin
    margin = max(margin, abs(objOffset.grossOffsetAcrossStatic), abs(objOffset.grossOffsetDownStatic))

    # set the starting pixel of the first reference window
    objOffset.referenceStartPixelDownStatic = margin + self.offsetSearchWindowHeight
    objOffset.referenceStartPixelAcrossStatic = margin + self.offsetSearchWindowWidth

    # find out the total number of windows
    objOffset.numberWindowDown = (m.length - 2*margin - 2*self.offsetSearchWindowHeight - self.offsetWindowHeight) // self.offsetSkipHeight
    objOffset.numberWindowAcross = (m.width - 2*margin - 2*self.offsetSearchWindowWidth - self.offsetWindowWidth) // self.offsetSkipWidth

    # gpu job control
    objOffset.deviceID = 0
    objOffset.nStreams = 2
    objOffset.numberWindowDownInChunk = 1
    objOffset.numberWindowAcrossInChunk = 64
    objOffset.mmapSize = 16

    # pass/adjust the parameters
    objOffset.setupParams()
    # set up the starting pixels for each window, based on the gross offset
    objOffset.setConstantGrossOffset(objOffset.grossOffsetAcrossStatic, objOffset.grossOffsetDownStatic)
    # check whether all pixels are in image range (optional)
    objOffset.checkPixelInImageRange()
    print('\n======================================')
    print('Running PyCuAmpcor...')
    print('======================================\n')
    objOffset.runAmpcor()

    ### Store params for later
    # location of the center of the first reference window,
    # needed later when subsampling the water body mask onto the offset grid
    self._insar.offsetImageTopoffset = objOffset.referenceStartPixelDownStatic + (objOffset.windowSizeHeight-1)//2
    self._insar.offsetImageLeftoffset = objOffset.referenceStartPixelAcrossStatic +(objOffset.windowSizeWidth-1)//2

    # offset image dimension, the number of windows
    width = objOffset.numberWindowAcross
    length = objOffset.numberWindowDown

    # convert the offset image from BIP to BIL
    # PyCuAmpcor writes (azimuth, range) pairs per pixel; reorder so band 1 is
    # range and band 2 is azimuth, matching the CPU path's output
    offsetBIP = np.fromfile(objOffset.offsetImageName, dtype=np.float32).reshape(length, width*2)
    offsetBIL = np.zeros((length*2, width), dtype=np.float32)
    offsetBIL[0:length*2:2, :] = offsetBIP[:, 1:width*2:2]
    offsetBIL[1:length*2:2, :] = offsetBIP[:, 0:width*2:2]
    os.remove(objOffset.offsetImageName)
    offsetBIL.astype(np.float32).tofile(objOffset.offsetImageName)

    # generate offset image description files
    outImg = isceobj.createImage()
    outImg.setDataType('FLOAT')
    outImg.setFilename(objOffset.offsetImageName)
    outImg.setBands(2)
    outImg.scheme = 'BIL'
    outImg.setWidth(objOffset.numberWindowAcross)
    outImg.setLength(objOffset.numberWindowDown)
    outImg.addDescription('two-band pixel offset file. 1st band: range offset, 2nd band: azimuth offset')
    outImg.setAccessMode('read')
    outImg.renderHdr()

    # gross offset image is not needed, since all zeros

    # generate snr image description files
    snrImg = isceobj.createImage()
    snrImg.setFilename( objOffset.snrImageName)
    snrImg.setDataType('FLOAT')
    snrImg.setBands(1)
    snrImg.setWidth(objOffset.numberWindowAcross)
    snrImg.setLength(objOffset.numberWindowDown)
    snrImg.setAccessMode('read')
    snrImg.renderHdr()

    # generate cov image description files
    # covariance of azimuth/range offsets.
    # 1st band: cov(az, az), 2nd band: cov(rg, rg), 3rd band: cov(az, rg)
    covImg = isceobj.createImage()
    covImg.setFilename(objOffset.covImageName)
    covImg.setDataType('FLOAT')
    covImg.setBands(3)
    covImg.scheme = 'BIP'
    covImg.setWidth(objOffset.numberWindowAcross)
    covImg.setLength(objOffset.numberWindowDown)
    # bug fix: the description was previously added to outImg (already rendered
    # above), so the covariance image ended up without a description
    covImg.addDescription('covariance of azimuth/range offsets')
    covImg.setAccessMode('read')
    covImg.renderHdr()

    return (objOffset.numberWindowAcross, objOffset.numberWindowDown)
|
||||
|
||||
# end of file
|
||||
|
|
@ -0,0 +1,44 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runDiffInterferogram')
|
||||
|
||||
def runDiffInterferogram(self):
    '''Compute the differential interferogram.

    Removes the flat-earth/topographic phase predicted by the rectified range
    offsets from the interferogram via an imageMath.py call, writing the
    differential interferogram into the 'insar' directory.
    '''
    # skip when InSAR processing is disabled on this workflow object
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)


    # range pixel size at the multilooked resolution
    rangePixelSize = self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize
    radarWavelength = referenceTrack.radarWavelength

    # subtract phase 4*pi*rangePixelSize*rangeOffset/wavelength from the
    # interferogram; the (b!=0) factor zeroes pixels with no offset data
    cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{}) * (b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, self._insar.rectRangeOffset, self._insar.differentialInterferogram)
    runCmd(cmd)


    os.chdir('../')

    catalog.printToLog(logger, "runDiffInterferogram")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,290 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runDownloadDem')
|
||||
|
||||
def runDownloadDem(self):
    '''download DEM and water body

    Downloads (when not supplied by the user):
      - a 1-arcsec SRTM DEM for coregistration (self.dem),
      - a 3-arcsec SRTM DEM for geocoding (self.demGeo),
      - a 1-arcsec water body mask (self.wbd),
    each into its own subdirectory, and records the paths on self._insar.
    '''
    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    # geographic bounding box [s, n, w, e] of the reference track
    bboxGeo = getBboxGeo(referenceTrack)
    bbox = np.array(bboxGeo)
    bboxStr = '{} {} {} {}'.format(int(np.floor(bbox[0])), int(np.ceil(bbox[1])), int(np.floor(bbox[2])), int(np.ceil(bbox[3])))


    #get 1 arcsecond dem for coregistration
    # use 'is None' for singleton comparison (PEP 8); previously '== None'
    if self.dem is None:
        demDir = 'dem_1_arcsec'
        os.makedirs(demDir, exist_ok=True)
        os.chdir(demDir)

        # download/stitch the DEM, render its XML with a full path, and remove
        # intermediate tiles and logs (function calls replace former dem.py /
        # fixImageXml.py / rm system calls)
        downloadDem(list(bbox), demType='version3', resolution=1, fillingValue=-32768, outputFile=None, userName=None, passWord=None)
        imagePathXml((glob.glob('demLat_*_*_Lon_*_*.dem.wgs84'))[0], fullPath=True)
        filesRemoved = glob.glob('*.hgt*') + glob.glob('*.log') + glob.glob('demLat_*_*_Lon_*_*.dem') + glob.glob('demLat_*_*_Lon_*_*.dem.vrt') + glob.glob('demLat_*_*_Lon_*_*.dem.xml')
        for filex in filesRemoved:
            os.remove(filex)

        os.chdir('../')

        self.dem = glob.glob(os.path.join(demDir, 'demLat_*_*_Lon_*_*.dem.wgs84'))[0]

    #get 3 arcsecond dem for geocoding
    if self.demGeo is None:
        demGeoDir = 'dem_3_arcsec'
        os.makedirs(demGeoDir, exist_ok=True)
        os.chdir(demGeoDir)

        # same procedure as above at 3-arcsec resolution
        downloadDem(list(bbox), demType='version3', resolution=3, fillingValue=-32768, outputFile=None, userName=None, passWord=None)
        imagePathXml((glob.glob('demLat_*_*_Lon_*_*.dem.wgs84'))[0], fullPath=True)
        filesRemoved = glob.glob('*.hgt*') + glob.glob('*.log') + glob.glob('demLat_*_*_Lon_*_*.dem') + glob.glob('demLat_*_*_Lon_*_*.dem.vrt') + glob.glob('demLat_*_*_Lon_*_*.dem.xml')
        for filex in filesRemoved:
            os.remove(filex)

        os.chdir('../')

        self.demGeo = glob.glob(os.path.join(demGeoDir, 'demLat_*_*_Lon_*_*.dem.wgs84'))[0]

    #get water body for masking interferogram
    if self.wbd is None:
        wbdDir = 'wbd_1_arcsec'
        os.makedirs(wbdDir, exist_ok=True)
        os.chdir(wbdDir)

        # download/stitch the water body mask (replaces former wbd.py call)
        download_wbd(int(np.floor(bbox[0])), int(np.ceil(bbox[1])), int(np.floor(bbox[2])), int(np.ceil(bbox[3])))
        # render XML with a full path and clean up log files (replaces former
        # fixImageXml.py / rm system calls)
        imagePathXml((glob.glob('swbdLat_*_*_Lon_*_*.wbd'))[0], fullPath=True)
        filesRemoved = glob.glob('*.log')
        for filex in filesRemoved:
            os.remove(filex)

        os.chdir('../')

        self.wbd = glob.glob(os.path.join(wbdDir, 'swbdLat_*_*_Lon_*_*.wbd'))[0]

    # record the final paths on the processing state object
    self._insar.dem = self.dem
    self._insar.demGeo = self.demGeo
    self._insar.wbd = self.wbd


    catalog.printToLog(logger, "runDownloadDem")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def downloadDem(bbox, demType='version3', resolution=1, fillingValue=-32768, outputFile=None, userName=None, passWord=None):
    '''
    Download and stitch SRTM/NASADEM tiles covering a bounding box, then apply
    the EGM96 -> WGS84 correction.

    bbox: [s, n, w, e]
    demType: can be 'version3' or 'nasadem'. nasadem is also tested.
    resolution: 1 or 3, NASADEM only available in 1-arc sec resolution
    fillingValue: value used to fill missing tiles
    outputFile: stitched DEM file name; derived from the bbox when None
    userName/passWord: Earthdata credentials passed to the stitcher
    '''
    from contrib.demUtils import createDemStitcher

    ds = createDemStitcher(demType)
    ds.configure()

    # select the download URL matching the DEM type and resolution
    if demType == 'version3':
        if resolution == 1:
            ds._url1 = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL1.003/2000.02.11'
        else:
            ds._url3 = 'https://e4ftl01.cr.usgs.gov/MEASURES/SRTMGL3.003/2000.02.11'
    elif demType == 'nasadem':
        # NASADEM only exists at 1-arcsec; override whatever was requested
        resolution = 1
        #this url is included in the module
        #ds._url1 = 'http://e4ftl01.cr.usgs.gov/MEASURES/NASADEM_HGT.001/2000.02.11'
    else:
        raise Exception('unknown DEM type, currently supported DEM types: version3 and nasadem')

    ds.setUsername(userName)
    ds.setPassword(passWord)

    # keep partial results around for debugging failed downloads
    ds._keepAfterFailed = True
    ds.setCreateXmlMetadata(True)
    ds.setUseLocalDirectory(False)
    ds.setFillingValue(fillingValue)
    ds.setFilling()

    # snap the bounding box outward to whole degrees (tile granularity)
    bbox = [int(np.floor(bbox[0])), int(np.ceil(bbox[1])), int(np.floor(bbox[2])), int(np.ceil(bbox[3]))]
    # use 'is None' for singleton comparison (PEP 8); previously '== None'
    if outputFile is None:
        outputFile = ds.defaultName(bbox)

    if not(ds.stitchDems(bbox[0:2],bbox[2:4],resolution,outputFile,'./',keep=True)):
        print('Could not create a stitched DEM. Some tiles are missing')
    else:
        #Apply correction EGM96 -> WGS84
        demImg = ds.correct()

    #report downloads
    for k,v in list(ds._downloadReport.items()):
        print(k,'=',v)
|
||||
|
||||
|
||||
def download_wbd(s, n, w, e):
    '''
    download water body
    water body. (0) --- land; (-1) --- water; (-2) --- no data.

    set no-value pixel inside of latitude [-56, 60] to -1
    set no-value pixel outside of latitidue [-56, 60] to -2

    look at this figure for SRTM coverage:
    https://www2.jpl.nasa.gov/srtm/images/SRTM_2-24-2016.gif

    Parameters:
        s, n, w, e: south/north latitudes and west/east longitudes of the
            area of interest (degrees; snapped outward to whole degrees).

    Returns:
        path of the stitched, post-processed water body file.
    '''
    import os
    import numpy as np
    import isceobj
    from iscesys.DataManager import createManager

    # snap the bounding box outward to whole degrees (tile granularity)
    latMin = np.floor(s)
    latMax = np.ceil(n)
    lonMin = np.floor(w)
    lonMax = np.ceil(e)

    ############################################################
    #1. download and stitch wbd
    ############################################################
    sw = createManager('wbd')
    sw.configure()

    outputFile = sw.defaultName([latMin,latMax,lonMin,lonMax])
    # reuse an existing product: the post-processing below was already applied
    if os.path.exists(outputFile) and os.path.exists(outputFile+'.xml'):
        print('water body file: {}'.format(outputFile))
        print('exists, do not download and correct')
        return outputFile

    #download and stitch the SWBD tiles
    # missing SWBD tiles are filled with -1 (water) for now; refined below
    sw.noFilling = False
    sw._fillingValue = -1
    sw.stitch([latMin,latMax],[lonMin,lonMax])


    ############################################################
    #2. replace 'areas with SRTM but no SWBD' with zeros (land)
    ############################################################
    print('post-process water body file')

    # tile lists shipped next to this module; each line starts with a 7-char
    # tile id like 'N34W118'
    print('get SRTM tiles')
    srtmListFile = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'srtm_tiles.txt')
    with open(srtmListFile) as f:
        srtmList = f.readlines()
    srtmList = [x[0:7] for x in srtmList]

    #get tiles that have SRTM DEM, but no SWBD, these are mostly tiles that do not have water body
    print('get tiles with SRTM and without SWBD')
    noSwbdListFile = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'srtm_no_swbd_tiles.txt')
    with open(noSwbdListFile) as f:
        noSwbdList = f.readlines()
    noSwbdList = [x[0:7] for x in noSwbdList]

    print('get SWBD tiles')
    swbdListFile = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'swbd_tiles.txt')
    with open(swbdListFile) as f:
        swbdList = f.readlines()
    swbdList = [x[0:7] for x in swbdList]


    #read resulting mosaicked water body
    wbdImage = isceobj.createDemImage()
    wbdImage.load(outputFile+'.xml')
    #using memmap instead, which should be faster, since we only have a few pixels to change
    # mode='r+' edits the stitched file in place
    wbd=np.memmap(outputFile, dtype=np.int8, mode='r+', shape=(wbdImage.length, wbdImage.width))

    #replace 'areas with SRTM but no SWBD' with zeros (land)
    names, nlats, nlons = sw.createNameListFromBounds([latMin,latMax],[lonMin,lonMax])
    # map the tile-name hemisphere letters to coordinate signs
    sign={'S':-1, 'N':1, 'W':-1, 'E':1}
    for tile in names:
        print('checking tile: {}'.format(tile))
        # tile names give the lower-left corner; +1 converts to the tile's top latitude
        firstLatitude = sign[tile[0].upper()]*int(tile[1:3])+1
        firstLongitude = sign[tile[3].upper()]*int(tile[4:7])
        # position of this tile within the mosaic (+0.5 rounds to nearest pixel)
        lineOffset = np.int32((firstLatitude - wbdImage.firstLatitude) / wbdImage.deltaLatitude + 0.5)
        sampleOffset = np.int32((firstLongitude - wbdImage.firstLongitude) / wbdImage.deltaLongitude + 0.5)

        #first line/sample of mosaicked SWBD is integer lat/lon, but it does not include last integer lat/lon line/sample
        #so here the size is 3600*3600 instead of 3601*3601

        #assuming areas without swbd are water
        if tile[0:7] not in swbdList:
            wbd[0+lineOffset:3600+lineOffset, 0+sampleOffset:3600+sampleOffset] = -1
        #assuming areas with srtm and without swbd are land
        if tile[0:7] in noSwbdList:
            wbd[0+lineOffset:3600+lineOffset, 0+sampleOffset:3600+sampleOffset] = 0


    ############################################################
    #3. set values outside of lat[-56, 60] to -2 (no data)
    ############################################################
    # SRTM/SWBD only cover latitudes within [-56, 60]
    print('check water body file')
    print('set areas outside of lat[-56, 60] to -2 (no data)')
    for i in range(wbdImage.length):
        lat = wbdImage.firstLatitude + wbdImage.deltaLatitude * i
        if lat > 60.0 or lat < -56.0:
            wbd[i, :] = -2
    # deleting the memmap flushes the in-place edits to disk
    del wbd, wbdImage


    return outputFile
|
||||
|
||||
|
||||
def imagePathXml(imageFile, fullPath=True):
    '''Re-render the XML metadata of an ISCE image with its file path updated.

    imageFile: path of the image whose .xml header should be (re)written.
    fullPath: when True, record the absolute path in the metadata;
              otherwise record only the base file name.
    '''
    import os
    import isceobj
    from isceobj.Util.ImageUtil import ImageLib as IML

    img = IML.loadImage(imageFile)[0]

    baseName = os.path.basename(imageFile)
    if fullPath:
        # absolute path of the file within its containing directory
        img.filename = os.path.abspath(os.path.join(os.path.dirname(imageFile), baseName))
    else:
        img.filename = baseName

    img.setAccessMode('READ')
    img.renderHdr()
|
||||
|
|
@ -0,0 +1,178 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import shutil
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from mroipac.filter.Filter import Filter
|
||||
from contrib.alos2filter.alos2filter import psfilt1
|
||||
from mroipac.icu.Icu import Icu
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import renameFile
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runFilt')
|
||||
|
||||
def runFilt(self):
    '''filter interferogram

    Driver step: run the filtering routine (filt) on the multilooked
    differential interferogram and record the step in the processing catalog.
    '''
    # skip entirely when InSAR processing is disabled on this workflow object
    if hasattr(self, 'doInSAR') and not self.doInSAR:
        return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    #referenceTrack = self._insar.loadTrack(reference=True)
    #secondaryTrack = self._insar.loadTrack(reference=False)

    # all real work happens in the module-level helper
    filt(self)

    catalog.printToLog(logger, "runFilt")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def filt(self):
|
||||
|
||||
insarDir = 'insar'
|
||||
os.makedirs(insarDir, exist_ok=True)
|
||||
os.chdir(insarDir)
|
||||
|
||||
|
||||
############################################################
|
||||
# STEP 1. filter interferogram
|
||||
############################################################
|
||||
print('\nfilter interferogram: {}'.format(self._insar.multilookDifferentialInterferogram))
|
||||
|
||||
toBeFiltered = self._insar.multilookDifferentialInterferogram
|
||||
if self.removeMagnitudeBeforeFiltering:
|
||||
toBeFiltered = 'tmp.int'
|
||||
cmd = "imageMath.py -e='a/(abs(a)+(a==0))' --a={} -o {} -t cfloat -s BSQ".format(self._insar.multilookDifferentialInterferogram, toBeFiltered)
|
||||
runCmd(cmd)
|
||||
|
||||
#if shutil.which('psfilt1') != None:
|
||||
if True:
|
||||
intImage = isceobj.createIntImage()
|
||||
intImage.load(toBeFiltered + '.xml')
|
||||
width = intImage.width
|
||||
length = intImage.length
|
||||
# cmd = "psfilt1 {int} {filtint} {width} {filterstrength} 64 16".format(
|
||||
# int = toBeFiltered,
|
||||
# filtint = self._insar.filteredInterferogram,
|
||||
# width = width,
|
||||
# filterstrength = self.filterStrength
|
||||
# )
|
||||
# runCmd(cmd)
|
||||
windowSize = self.filterWinsize
|
||||
stepSize = self.filterStepsize
|
||||
psfilt1(toBeFiltered, self._insar.filteredInterferogram, width, self.filterStrength, windowSize, stepSize)
|
||||
create_xml(self._insar.filteredInterferogram, width, length, 'int')
|
||||
else:
|
||||
#original
|
||||
intImage = isceobj.createIntImage()
|
||||
intImage.load(toBeFiltered + '.xml')
|
||||
intImage.setAccessMode('read')
|
||||
intImage.createImage()
|
||||
width = intImage.width
|
||||
length = intImage.length
|
||||
|
||||
#filtered
|
||||
filtImage = isceobj.createIntImage()
|
||||
filtImage.setFilename(self._insar.filteredInterferogram)
|
||||
filtImage.setWidth(width)
|
||||
filtImage.setAccessMode('write')
|
||||
filtImage.createImage()
|
||||
|
||||
#looks like the ps filtering program keep the original interferogram magnitude, which is bad for phase unwrapping?
|
||||
filters = Filter()
|
||||
filters.wireInputPort(name='interferogram',object=intImage)
|
||||
filters.wireOutputPort(name='filtered interferogram',object=filtImage)
|
||||
filters.goldsteinWerner(alpha=self.filterStrength)
|
||||
intImage.finalizeImage()
|
||||
filtImage.finalizeImage()
|
||||
del intImage, filtImage, filters
|
||||
|
||||
if self.removeMagnitudeBeforeFiltering:
|
||||
os.remove(toBeFiltered)
|
||||
os.remove(toBeFiltered + '.vrt')
|
||||
os.remove(toBeFiltered + '.xml')
|
||||
|
||||
#restore original magnitude
|
||||
tmpFile = 'tmp.int'
|
||||
renameFile(self._insar.filteredInterferogram, tmpFile)
|
||||
cmd = "imageMath.py -e='a*abs(b)' --a={} --b={} -o {} -t cfloat -s BSQ".format(tmpFile, self._insar.multilookDifferentialInterferogram, self._insar.filteredInterferogram)
|
||||
runCmd(cmd)
|
||||
os.remove(tmpFile)
|
||||
os.remove(tmpFile + '.vrt')
|
||||
os.remove(tmpFile + '.xml')
|
||||
|
||||
|
||||
############################################################
|
||||
# STEP 2. create phase sigma using filtered interferogram
|
||||
############################################################
|
||||
print('\ncreate phase sigma using: {}'.format(self._insar.filteredInterferogram))
|
||||
|
||||
#recreate filtered image
|
||||
filtImage = isceobj.createIntImage()
|
||||
filtImage.load(self._insar.filteredInterferogram + '.xml')
|
||||
filtImage.setAccessMode('read')
|
||||
filtImage.createImage()
|
||||
|
||||
#amplitude image
|
||||
ampImage = isceobj.createAmpImage()
|
||||
ampImage.load(self._insar.multilookAmplitude + '.xml')
|
||||
ampImage.setAccessMode('read')
|
||||
ampImage.createImage()
|
||||
|
||||
#phase sigma correlation image
|
||||
phsigImage = isceobj.createImage()
|
||||
phsigImage.setFilename(self._insar.multilookPhsig)
|
||||
phsigImage.setWidth(width)
|
||||
phsigImage.dataType='FLOAT'
|
||||
phsigImage.bands = 1
|
||||
phsigImage.setImageType('cor')
|
||||
phsigImage.setAccessMode('write')
|
||||
phsigImage.createImage()
|
||||
|
||||
icu = Icu(name='insarapp_filter_icu')
|
||||
icu.configure()
|
||||
icu.unwrappingFlag = False
|
||||
icu.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage)
|
||||
|
||||
phsigImage.renderHdr()
|
||||
|
||||
filtImage.finalizeImage()
|
||||
ampImage.finalizeImage()
|
||||
phsigImage.finalizeImage()
|
||||
|
||||
del filtImage
|
||||
del ampImage
|
||||
del phsigImage
|
||||
del icu
|
||||
|
||||
|
||||
############################################################
|
||||
# STEP 3. mask filtered interferogram using water body
|
||||
############################################################
|
||||
print('\nmask filtered interferogram using: {}'.format(self._insar.multilookWbdOut))
|
||||
|
||||
if self.waterBodyMaskStartingStep=='filt':
|
||||
#if not os.path.exists(self._insar.multilookWbdOut):
|
||||
# catalog.addItem('warning message', 'requested masking interferogram with water body, but water body does not exist', 'runFilt')
|
||||
#else:
|
||||
wbd = np.fromfile(self._insar.multilookWbdOut, dtype=np.int8).reshape(length, width)
|
||||
phsig=np.memmap(self._insar.multilookPhsig, dtype='float32', mode='r+', shape=(length, width))
|
||||
phsig[np.nonzero(wbd==-1)]=0
|
||||
del phsig
|
||||
filt=np.memmap(self._insar.filteredInterferogram, dtype='complex64', mode='r+', shape=(length, width))
|
||||
filt[np.nonzero(wbd==-1)]=0
|
||||
del filt
|
||||
del wbd
|
||||
|
||||
|
||||
os.chdir('../')
|
||||
|
|
@ -0,0 +1,103 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import statistics
|
||||
import numpy as np
|
||||
from scipy.ndimage.filters import median_filter
|
||||
|
||||
import isceobj
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runFiltOffset')
|
||||
|
||||
def runFiltOffset(self):
    '''Filter the dense offset field with a windowed, SNR-gated median filter.

    The two offset bands (1st: range, 2nd: azimuth) are stored line-interleaved
    (BIL) in ``self._insar.denseOffset``. For each interior pixel of each band,
    the pixel is replaced by the median of the window samples whose SNR exceeds
    ``self.offsetFilterSnrThreshold`` and whose value is not the null value; if
    no window sample qualifies, the pixel is set to the null value. The result
    is written to ``self._insar.denseOffsetFilt`` and an ISCE header rendered.

    Does nothing unless dense offset processing is enabled and the mode
    combination is 0 or 1.
    '''
    if not self.doDenseOffset:
        return
    if not ((self._insar.modeCombination == 0) or (self._insar.modeCombination == 1)):
        return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    denseOffsetDir = 'dense_offset'
    os.makedirs(denseOffsetDir, exist_ok=True)
    os.chdir(denseOffsetDir)

    #referenceTrack = self._insar.loadProduct(self._insar.referenceTrackParameter)
    #secondaryTrack = self._insar.loadProduct(self._insar.secondaryTrackParameter)

    #########################################################################################

    if not self.doOffsetFiltering:
        print('offset field filtering is not requested.')
        os.chdir('../')
        catalog.printToLog(logger, "runFiltOffset")
        self._insar.procDoc.addAllFromCatalog(catalog)
        return

    windowSize = self.offsetFilterWindowsize
    nullValue = 0
    snrThreshold = self.offsetFilterSnrThreshold

    if windowSize < 3:
        raise Exception('dense offset field filter window size must >= 3')
    if windowSize % 2 != 1:
        # a median window must be odd-sized so it is centered on the pixel
        windowSize += 1
        print('dense offset field filter window size is not odd, changed to: {}'.format(windowSize))

    print('\noffset filter parameters:')
    print('**************************************')
    print('filter window size: {}'.format(windowSize))
    print('filter null value: {}'.format(nullValue))
    print('filter snr threshold: {}\n'.format(snrThreshold))

    img = isceobj.createImage()
    img.load(self._insar.denseOffset + '.xml')
    width = img.width
    length = img.length

    # offset file is 2-band BIL float32: even lines are range offsets,
    # odd lines are azimuth offsets
    offset = np.fromfile(self._insar.denseOffset, dtype=np.float32).reshape(length*2, width)
    snr = np.fromfile(self._insar.denseOffsetSnr, dtype=np.float32).reshape(length, width)
    offsetFilt = np.zeros((length*2, width), dtype=np.float32)

    # half window size; windowSize is guaranteed odd at this point
    edge = int((windowSize-1)/2+0.5)
    for k in range(2):
        print('filtering band {} of {}'.format(k+1, 2))
        # views (not copies) of band k of the BIL-interleaved arrays,
        # so writes to bandFilt land directly in offsetFilt
        band = offset[k:length*2:2, :]
        bandFilt = offsetFilt[k:length*2:2, :]
        for i in range(0+edge, length-edge):
            for j in range(0+edge, width-edge):
                bandSub = band[i-edge:i+edge+1, j-edge:j+edge+1]
                snrSub = snr[i-edge:i+edge+1, j-edge:j+edge+1]
                # bandSubUsed is a 1-d numpy array of window samples that pass
                # both the SNR gate and the null-value gate
                bandSubUsed = bandSub[np.nonzero(np.logical_and(snrSub>snrThreshold, bandSub!=nullValue))]
                if bandSubUsed.size == 0:
                    bandFilt[i, j] = nullValue
                else:
                    # np.median runs at C speed; the pure-Python
                    # statistics.median is far too slow for this
                    # per-pixel inner loop
                    bandFilt[i, j] = np.median(bandSubUsed)

    offsetFilt.astype(np.float32).tofile(self._insar.denseOffsetFilt)
    outImg = isceobj.createImage()
    outImg.setDataType('FLOAT')
    outImg.setFilename(self._insar.denseOffsetFilt)
    outImg.setBands(2)
    outImg.scheme = 'BIL'
    outImg.setWidth(width)
    outImg.setLength(length)
    outImg.addDescription('two-band pixel offset file. 1st band: range offset, 2nd band: azimuth offset')
    outImg.setAccessMode('read')
    outImg.renderHdr()

    #########################################################################################

    os.chdir('../')
    catalog.printToLog(logger, "runFiltOffset")
    self._insar.procDoc.addAllFromCatalog(catalog)
||||
|
||||
|
||||
|
|
@ -0,0 +1,138 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
import stdproc
|
||||
from iscesys.StdOEL.StdOELPy import create_writer
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import readOffset
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runFormInterferogram')
|
||||
|
||||
def runFormInterferogram(self):
    '''form interferograms.

    For every (frame, swath) directory, resample the secondary SLC onto the
    reference SLC grid using the culled offset field ('cull.off'), forming a
    multilooked interferogram and amplitude pair, then zero amplitude pixels
    where either channel is zero so that non-overlap areas stay null.
    '''
    # allow opting out of the InSAR branch entirely
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    # process each frame/swath in its own working directory
    for i, frameNumber in enumerate(self._insar.referenceFrames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        os.chdir(frameDir)
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            swathDir = 's{}'.format(swathNumber)
            os.chdir(swathDir)

            print('forming interferogram frame {}, swath {}'.format(frameNumber, swathNumber))

            referenceSwath = referenceTrack.frames[i].swaths[j]
            secondarySwath = secondaryTrack.frames[i].swaths[j]


            #############################################
            #1. form interferogram
            #############################################
            # 'cull.off' is read from the current swath directory;
            # presumably written by the preceding offset estimation /
            # culling step — TODO confirm against the step ordering
            refinedOffsets = readOffset('cull.off')
            # multilooked interferogram dimensions (truncating division)
            intWidth = int(referenceSwath.numberOfSamples / self._insar.numberRangeLooks1)
            intLength = int(referenceSwath.numberOfLines / self._insar.numberAzimuthLooks1)
            # resamp expects Doppler normalized by PRF (cycles per line)
            dopplerVsPixel = [i/secondarySwath.prf for i in secondarySwath.dopplerVsPixel]

            #reference slc
            mSLC = isceobj.createSlcImage()
            mSLC.load(self._insar.referenceSlc+'.xml')
            mSLC.setAccessMode('read')
            mSLC.createImage()

            #secondary slc
            sSLC = isceobj.createSlcImage()
            sSLC.load(self._insar.secondarySlc+'.xml')
            sSLC.setAccessMode('read')
            sSLC.createImage()

            #interferogram
            interf = isceobj.createIntImage()
            interf.setFilename(self._insar.interferogram)
            interf.setWidth(intWidth)
            interf.setAccessMode('write')
            interf.createImage()

            #amplitude (two-band: reference and secondary magnitudes)
            amplitude = isceobj.createAmpImage()
            amplitude.setFilename(self._insar.amplitude)
            amplitude.setWidth(intWidth)
            amplitude.setAccessMode('write')
            amplitude.createImage()

            #create a writer for resamp
            stdWriter = create_writer("log", "", True, filename="resamp.log")
            stdWriter.setFileTag("resamp", "log")
            stdWriter.setFileTag("resamp", "err")
            stdWriter.setFileTag("resamp", "out")


            #set up resampling program now
            #The setting has been compared with resamp_roi's setting in ROI_pac item by item.
            #The two kinds of setting are exactly the same. The number of setting items are
            #exactly the same
            objResamp = stdproc.createResamp()
            objResamp.wireInputPort(name='offsets', object=refinedOffsets)
            objResamp.stdWriter = stdWriter
            objResamp.setNumberFitCoefficients(6)
            objResamp.setNumberRangeBin1(referenceSwath.numberOfSamples)
            objResamp.setNumberRangeBin2(secondarySwath.numberOfSamples)
            objResamp.setStartLine(1)
            objResamp.setNumberLines(referenceSwath.numberOfLines)
            objResamp.setFirstLineOffset(1)
            objResamp.setDopplerCentroidCoefficients(dopplerVsPixel)
            objResamp.setRadarWavelength(secondaryTrack.radarWavelength)
            objResamp.setSlantRangePixelSpacing(secondarySwath.rangePixelSize)
            objResamp.setNumberRangeLooks(self._insar.numberRangeLooks1)
            objResamp.setNumberAzimuthLooks(self._insar.numberAzimuthLooks1)
            objResamp.setFlattenWithOffsetFitFlag(0)
            # resamples sSLC onto mSLC's grid and writes interferogram + amplitude
            objResamp.resamp(mSLC, sSLC, interf, amplitude)

            #finialize images
            mSLC.finalizeImage()
            sSLC.finalizeImage()
            interf.finalizeImage()
            amplitude.finalizeImage()
            stdWriter.finalize()


            #############################################
            #2. trim amplitude
            #############################################
            # tmpAmplitude = 'tmp.amp'
            # cmd = "imageMath.py -e='a_0*(a_1>0);a_1*(a_0>0)' --a={} -o={} -s BIP -t float".format(
            #     self._insar.amplitude,
            #     tmpAmplitude
            #     )
            # runCmd(cmd)
            # os.remove(self._insar.amplitude)
            # os.remove(tmpAmplitude+'.xml')
            # os.remove(tmpAmplitude+'.vrt')
            # os.rename(tmpAmplitude, self._insar.amplitude)

            #using memmap instead, which should be faster, since we only have a few pixels to change
            # the two float32 amplitude bands are viewed as one complex64 value
            # per pixel; zero the pixel wherever either band is zero
            amp=np.memmap(self._insar.amplitude, dtype='complex64', mode='r+', shape=(intLength, intWidth))
            index = np.nonzero( (np.real(amp)==0) + (np.imag(amp)==0) )
            amp[index]=0
            del amp

            os.chdir('../')
        os.chdir('../')

    catalog.printToLog(logger, "runFormInterferogram")
    self._insar.procDoc.addAllFromCatalog(catalog)
||||
|
|
@ -0,0 +1,614 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import logging
|
||||
import datetime
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runFrameMosaic')
|
||||
|
||||
def runFrameMosaic(self):
    '''mosaic frames

    Mosaic the per-frame interferograms and amplitudes into single track-wide
    products in the 'insar' directory. With a single frame the products are
    simply linked/copied; with multiple frames they are stitched with
    frameMosaic() (with phase compensation for the interferogram). Track
    parameters of both reference and secondary are updated and saved.
    '''
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    mosaicDir = 'insar'
    os.makedirs(mosaicDir, exist_ok=True)
    os.chdir(mosaicDir)

    numberOfFrames = len(referenceTrack.frames)
    if numberOfFrames == 1:
        # single frame: no mosaicking needed, just expose the frame products
        # here (symlink data; copy headers so they are local files)
        import shutil
        frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0]))
        if not os.path.isfile(self._insar.interferogram):
            os.symlink(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram)
        #shutil.copy2() can overwrite
        shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
        shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
        if not os.path.isfile(self._insar.amplitude):
            os.symlink(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude)
        shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
        shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')

        # os.rename(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram)
        # os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
        # os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
        # os.rename(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude)
        # os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
        # os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')

        #update track parameters
        #########################################################
        #mosaic size
        referenceTrack.numberOfSamples = referenceTrack.frames[0].numberOfSamples
        referenceTrack.numberOfLines = referenceTrack.frames[0].numberOfLines
        #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
        #range parameters
        referenceTrack.startingRange = referenceTrack.frames[0].startingRange
        referenceTrack.rangeSamplingRate = referenceTrack.frames[0].rangeSamplingRate
        referenceTrack.rangePixelSize = referenceTrack.frames[0].rangePixelSize
        #azimuth parameters
        referenceTrack.sensingStart = referenceTrack.frames[0].sensingStart
        referenceTrack.prf = referenceTrack.frames[0].prf
        referenceTrack.azimuthPixelSize = referenceTrack.frames[0].azimuthPixelSize
        referenceTrack.azimuthLineInterval = referenceTrack.frames[0].azimuthLineInterval

        #update track parameters, secondary
        #########################################################
        #mosaic size
        secondaryTrack.numberOfSamples = secondaryTrack.frames[0].numberOfSamples
        secondaryTrack.numberOfLines = secondaryTrack.frames[0].numberOfLines
        #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
        #range parameters
        secondaryTrack.startingRange = secondaryTrack.frames[0].startingRange
        secondaryTrack.rangeSamplingRate = secondaryTrack.frames[0].rangeSamplingRate
        secondaryTrack.rangePixelSize = secondaryTrack.frames[0].rangePixelSize
        #azimuth parameters
        secondaryTrack.sensingStart = secondaryTrack.frames[0].sensingStart
        secondaryTrack.prf = secondaryTrack.frames[0].prf
        secondaryTrack.azimuthPixelSize = secondaryTrack.frames[0].azimuthPixelSize
        secondaryTrack.azimuthLineInterval = secondaryTrack.frames[0].azimuthLineInterval

    else:
        #choose offsets: matched offsets if requested, otherwise geometrical
        if self.frameOffsetMatching:
            rangeOffsets = self._insar.frameRangeOffsetMatchingReference
            azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference
        else:
            rangeOffsets = self._insar.frameRangeOffsetGeometricalReference
            azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference

        #list of input files
        inputInterferograms = []
        inputAmplitudes = []
        for i, frameNumber in enumerate(self._insar.referenceFrames):
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', self._insar.interferogram))
            inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', self._insar.amplitude))

        #note that track parameters are updated after mosaicking
        #mosaic amplitudes (no phase compensation; track updated on the
        #interferogram pass below so it happens exactly once)
        frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude,
            rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
            updateTrack=False, phaseCompensation=False, resamplingMethod=0)
        #mosaic interferograms
        (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
            rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
            updateTrack=True, phaseCompensation=True, resamplingMethod=1)

        create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp')
        create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int')

        # index 0 is the (zero) entry for the first frame, so report from 1
        catalog.addItem('frame phase diff estimated', phaseDiffEst[1:], 'runFrameMosaic')
        catalog.addItem('frame phase diff used', phaseDiffUsed[1:], 'runFrameMosaic')
        catalog.addItem('frame phase diff used source', phaseDiffSource[1:], 'runFrameMosaic')
        catalog.addItem('frame phase diff samples used', numberOfValidSamples[1:], 'runFrameMosaic')

        #update secondary parameters here
        #do not match for secondary, always use geometrical
        rangeOffsets = self._insar.frameRangeOffsetGeometricalSecondary
        azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalSecondary
        frameMosaicParameters(secondaryTrack, rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1)

    os.chdir('../')
    #save parameter file
    self._insar.saveProduct(referenceTrack, self._insar.referenceTrackParameter)
    self._insar.saveProduct(secondaryTrack, self._insar.secondaryTrackParameter)

    catalog.printToLog(logger, "runFrameMosaic")
    self._insar.procDoc.addAllFromCatalog(catalog)
||||
|
||||
|
||||
def frameMosaic(track, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateTrack=False, phaseCompensation=False, phaseDiffFixed=None, snapThreshold=None, resamplingMethod=0):
    '''
    mosaic frames

    track:                 track
    inputFiles:            input file list
    output file:           output mosaic file
    rangeOffsets:          range offsets
    azimuthOffsets:        azimuth offsets
    numberOfRangeLooks:    number of range looks of the input files
    numberOfAzimuthLooks:  number of azimuth looks of the input files
    updateTrack:           whether update track parameters
    phaseCompensation:     whether do phase compensation for each frame
    phaseDiffFixed:        if provided, the estimated value will snap to one of these values, which is nearest to the estimated one.
    snapThreshold:         this is used with phaseDiffFixed
    resamplingMethod:      0: amp resampling. 1: int resampling. 2: slc resampling

    returns (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples)
    when phaseCompensation is True; otherwise returns None implicitly.
    '''
    import numpy as np

    from contrib.alos2proc_f.alos2proc_f import rect_with_looks
    from contrib.alos2proc.alos2proc import resamp
    from isceobj.Alos2Proc.runSwathMosaic import readImage
    from isceobj.Alos2Proc.runSwathMosaic import findNonzero
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
    from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_file
    from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_keyword
    from isceobj.Alos2Proc.Alos2ProcPublic import computePhaseDiff
    from isceobj.Alos2Proc.Alos2ProcPublic import snap

    numberOfFrames = len(track.frames)
    frames = track.frames

    # dimensions of each input frame image
    rectWidth = []
    rectLength = []
    for i in range(numberOfFrames):
        infImg = isceobj.createImage()
        infImg.load(inputFiles[i]+'.xml')
        rectWidth.append(infImg.width)
        rectLength.append(infImg.length)

    #convert original offset to offset for images with looks
    #use list instead of np.array to make it consistent with the rest of the code
    rangeOffsets1 = [i/numberOfRangeLooks for i in rangeOffsets]
    azimuthOffsets1 = [i/numberOfAzimuthLooks for i in azimuthOffsets]

    #get offset relative to the first frame (cumulative sum of pairwise offsets)
    rangeOffsets2 = [0.0]
    azimuthOffsets2 = [0.0]
    for i in range(1, numberOfFrames):
        rangeOffsets2.append(0.0)
        azimuthOffsets2.append(0.0)
        for j in range(1, i+1):
            rangeOffsets2[i] += rangeOffsets1[j]
            azimuthOffsets2[i] += azimuthOffsets1[j]

    #resample each frame so residual offsets are integer pixels
    rinfs = []
    for i, inf in enumerate(inputFiles):
        # per-frame local output name, e.g. 'diff_xxx_1.int'
        rinfs.append("{}_{}{}".format(os.path.splitext(os.path.basename(inf))[0], i, os.path.splitext(os.path.basename(inf))[1]))
        #do not resample first frame
        if i == 0:
            rinfs[i] = inf
        else:
            #no need to resample when the offset is already (near-)integer
            if (abs(rangeOffsets2[i] - round(rangeOffsets2[i])) < 0.0001) and (abs(azimuthOffsets2[i] - round(azimuthOffsets2[i])) < 0.0001):
                if os.path.isfile(rinfs[i]):
                    os.remove(rinfs[i])
                os.symlink(inf, rinfs[i])
                #all of the following use of rangeOffsets2/azimuthOffsets2 is inside int(), we do the following in case it is like
                #4.99999999999...
                rangeOffsets2[i] = round(rangeOffsets2[i])
                azimuthOffsets2[i] = round(azimuthOffsets2[i])

                infImg = isceobj.createImage()
                infImg.load(inf+'.xml')
                if infImg.getImageType() == 'amp':
                    create_xml(rinfs[i], infImg.width, infImg.length, 'amp')
                else:
                    create_xml(rinfs[i], infImg.width, infImg.length, 'int')
            else:
                infImg = isceobj.createImage()
                infImg.load(inf+'.xml')
                # only the fractional part is resampled here; the integer
                # part is applied later via array placement
                rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i])
                azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i])

                if resamplingMethod == 0:
                    # amplitude-style: bilinear on the complex samples directly
                    rect_with_looks(inf,
                                    rinfs[i],
                                    infImg.width, infImg.length,
                                    infImg.width, infImg.length,
                                    1.0, 0.0,
                                    0.0, 1.0,
                                    rangeOffsets2Frac, azimuthOffsets2Frac,
                                    1,1,
                                    1,1,
                                    'COMPLEX',
                                    'Bilinear')
                    if infImg.getImageType() == 'amp':
                        create_xml(rinfs[i], infImg.width, infImg.length, 'amp')
                    else:
                        create_xml(rinfs[i], infImg.width, infImg.length, 'int')

                elif resamplingMethod == 1:
                    #interferogram-style: resample phase (sinc) and amplitude
                    #(bilinear) separately, then recombine
                    #decompose amplitude and phase
                    phaseFile = 'phase'
                    amplitudeFile = 'amplitude'
                    data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width)
                    phase = np.exp(np.complex64(1j) * np.angle(data))
                    # keep invalid (zero) pixels zero rather than unit phasors
                    phase[np.nonzero(data==0)] = 0
                    phase.astype(np.complex64).tofile(phaseFile)
                    amplitude = np.absolute(data)
                    amplitude.astype(np.float32).tofile(amplitudeFile)

                    #resampling
                    phaseRectFile = 'phaseRect'
                    amplitudeRectFile = 'amplitudeRect'
                    rect_with_looks(phaseFile,
                                    phaseRectFile,
                                    infImg.width, infImg.length,
                                    infImg.width, infImg.length,
                                    1.0, 0.0,
                                    0.0, 1.0,
                                    rangeOffsets2Frac, azimuthOffsets2Frac,
                                    1,1,
                                    1,1,
                                    'COMPLEX',
                                    'Sinc')
                    rect_with_looks(amplitudeFile,
                                    amplitudeRectFile,
                                    infImg.width, infImg.length,
                                    infImg.width, infImg.length,
                                    1.0, 0.0,
                                    0.0, 1.0,
                                    rangeOffsets2Frac, azimuthOffsets2Frac,
                                    1,1,
                                    1,1,
                                    'REAL',
                                    'Bilinear')

                    #recombine amplitude and phase
                    phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(infImg.length, infImg.width)
                    amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(infImg.length, infImg.width)
                    (phase*amplitude).astype(np.complex64).tofile(rinfs[i])

                    #tidy up
                    os.remove(phaseFile)
                    os.remove(amplitudeFile)
                    os.remove(phaseRectFile)
                    os.remove(amplitudeRectFile)
                    if infImg.getImageType() == 'amp':
                        create_xml(rinfs[i], infImg.width, infImg.length, 'amp')
                    else:
                        create_xml(rinfs[i], infImg.width, infImg.length, 'int')
                else:
                    # slc-style: full Doppler-aware resampling; 'fake' disables
                    # the optional residual offset files
                    resamp(inf,
                           rinfs[i],
                           'fake',
                           'fake',
                           infImg.width, infImg.length,
                           frames[i].swaths[0].prf,
                           frames[i].swaths[0].dopplerVsPixel,
                           [rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                           [azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
                    create_xml(rinfs[i], infImg.width, infImg.length, 'slc')

    #determine output width and length
    #actually no need to calculate in azimuth direction
    # xs/xe, ys/ye: start/end pixel of each frame in a common coordinate
    # system anchored to the first frame
    xs = []
    xe = []
    ys = []
    ye = []
    for i in range(numberOfFrames):
        if i == 0:
            xs.append(0)
            xe.append(rectWidth[i] - 1)
            ys.append(0)
            ye.append(rectLength[i] - 1)
        else:
            xs.append(0 - int(rangeOffsets2[i]))
            xe.append(rectWidth[i] - 1 - int(rangeOffsets2[i]))
            ys.append(0 - int(azimuthOffsets2[i]))
            ye.append(rectLength[i] - 1 - int(azimuthOffsets2[i]))

    (xmin, xminIndex) = min((v,i) for i,v in enumerate(xs))
    (xmax, xmaxIndex) = max((v,i) for i,v in enumerate(xe))
    (ymin, yminIndex) = min((v,i) for i,v in enumerate(ys))
    (ymax, ymaxIndex) = max((v,i) for i,v in enumerate(ye))

    outWidth = xmax - xmin + 1
    outLength = ymax - ymin + 1


    #prepare for mosaicing using numpy: shift all coordinates so the
    #mosaic starts at (0, 0)
    xs = [x-xmin for x in xs]
    xe = [x-xmin for x in xe]
    ys = [y-ymin for y in ys]
    ye = [y-ymin for y in ye]


    #compute phase offset between each pair of adjacent frames
    if phaseCompensation:

        phaseDiffEst = [0.0 for i in range(numberOfFrames)]
        phaseDiffUsed = [0.0 for i in range(numberOfFrames)]
        phaseDiffSource = ['estimated' for i in range(numberOfFrames)]
        numberOfValidSamples = [0 for i in range(numberOfFrames)]
        #phaseDiffEst = [0.0]
        #phaseDiffUsed = [0.0]
        #phaseDiffSource = ['estimated']

        phaseOffsetPolynomials = [np.array([0.0])]
        for i in range(1, numberOfFrames):
            # overlap strip between frame i-1 (upper) and frame i (lower)
            upperframe = np.zeros((ye[i-1]-ys[i]+1, outWidth), dtype=np.complex128)
            lowerframe = np.zeros((ye[i-1]-ys[i]+1, outWidth), dtype=np.complex128)
            #upper frame
            # NOTE(review): readImageFromVrt is not in the import list above;
            # presumably defined elsewhere in this module — confirm
            if os.path.isfile(rinfs[i-1]):
                upperframe[:,xs[i-1]:xe[i-1]+1] = readImage(rinfs[i-1], rectWidth[i-1], rectLength[i-1], 0, rectWidth[i-1]-1, ys[i]-ys[i-1], ye[i-1]-ys[i-1])
            else:
                upperframe[:,xs[i-1]:xe[i-1]+1] = readImageFromVrt(rinfs[i-1], 0, rectWidth[i-1]-1, ys[i]-ys[i-1], ye[i-1]-ys[i-1])
            #lower frame
            if os.path.isfile(rinfs[i]):
                lowerframe[:,xs[i]:xe[i]+1] = readImage(rinfs[i], rectWidth[i], rectLength[i], 0, rectWidth[i]-1, 0, ye[i-1]-ys[i])
            else:
                lowerframe[:,xs[i]:xe[i]+1] = readImageFromVrt(rinfs[i], 0, rectWidth[i]-1, 0, ye[i-1]-ys[i])
            #get a polynomial
            diff = np.sum(upperframe * np.conj(lowerframe), axis=0)
            (firstLine, lastLine, firstSample, lastSample) = findNonzero(np.reshape(diff, (1, outWidth)))
            #here i use mean value(deg=0) in case difference is around -pi or pi.
            #!!!!!there have been updates, now deg must be 0
            deg = 0
            p = np.polyfit(np.arange(firstSample, lastSample+1), np.angle(diff[firstSample:lastSample+1]), deg)

            #need to use a more sophisticated method to compute the mean phase difference
            (phaseDiffEst[i], numberOfValidSamples[i]) = computePhaseDiff(upperframe, lowerframe, coherenceWindowSize=9, coherenceThreshold=0.80)

            #snap phase difference to fixed values
            if phaseDiffFixed is not None:
                (outputValue, snapped) = snap(phaseDiffEst[i], phaseDiffFixed, snapThreshold)
                if snapped == True:
                    phaseDiffUsed[i] = outputValue
                    phaseDiffSource[i] = 'estimated+snap'
                else:
                    phaseDiffUsed[i] = phaseDiffEst[i]
                    phaseDiffSource[i] = 'estimated'
            else:
                phaseDiffUsed[i] = phaseDiffEst[i]
                phaseDiffSource[i] = 'estimated'

            #use new phase constant value (replace the polyfit constant with
            #the coherence-weighted estimate)
            p[-1] = phaseDiffUsed[i]

            phaseOffsetPolynomials.append(p)


            #check fit result
            DEBUG = False
            if DEBUG:
                #create a dir and work in this dir
                diffDir = 'frame_mosaic'
                os.makedirs(diffDir, exist_ok=True)
                os.chdir(diffDir)

                #dump phase difference
                diffFilename = 'phase_difference_frame{}-frame{}.int'.format(i, i+1)
                (upperframe * np.conj(lowerframe)).astype(np.complex64).tofile(diffFilename)
                create_xml(diffFilename, outWidth, ye[i-1]-ys[i]+1, 'int')

                #plot phase difference vs range
                import matplotlib.pyplot as plt
                x = np.arange(firstSample, lastSample+1)
                y = np.angle(diff[firstSample:lastSample+1])
                plt.plot(x, y, label='original phase difference')
                plt.plot(x, np.polyval(p, x), label='fitted phase difference')
                plt.legend()

                plt.minorticks_on()
                plt.tick_params('both', length=10, which='major')
                plt.tick_params('both', length=5, which='minor')

                plt.xlabel('Range Sample Number [Samples]')
                plt.ylabel('Phase Difference [Rad]')
                plt.savefig('phase_difference_frame{}-frame{}.pdf'.format(i, i+1))

                os.chdir('../')


    #mosaic file: write frames top to bottom, splitting each overlap at
    #its midpoint
    outFp = open(outputfile,'wb')
    for i in range(numberOfFrames):
        print('adding frame: {}'.format(i+1))

        #phase offset in the polynomials: product of all polynomial phasors
        #up to and including this frame
        if phaseCompensation:
            cJ = np.complex64(1j)
            phaseOffset = np.ones(outWidth, dtype=np.complex64)
            for j in range(i+1):
                phaseOffset *= np.exp(cJ*np.polyval(phaseOffsetPolynomials[j], np.arange(outWidth)))

        #get start line number (starts with zero)
        if i == 0:
            ys1 = 0
        else:
            ys1 = int((ye[i-1]+ys[i])/2.0) + 1 - ys[i]
        #get end line number (start with zero)
        if i == numberOfFrames-1:
            ye1 = rectLength[i] - 1
        else:
            ye1 = int((ye[i]+ys[i+1])/2.0) - ys[i]

        #get image format from the frame's VRT so raw data can be read directly
        inputimage = find_vrt_file(rinfs[i]+'.vrt', 'SourceFilename', relative_path=True)
        byteorder = find_vrt_keyword(rinfs[i]+'.vrt', 'ByteOrder')
        if byteorder == 'LSB':
            swapByte = False
        else:
            swapByte = True
        imageoffset = int(find_vrt_keyword(rinfs[i]+'.vrt', 'ImageOffset'))
        lineoffset = int(find_vrt_keyword(rinfs[i]+'.vrt', 'LineOffset'))

        #read image line by line, place it at the frame's range position,
        #apply the phase correction, and append to the mosaic
        with open(inputimage,'rb') as fp:
            for j in range(ys1, ye1+1):
                fp.seek(imageoffset+j*lineoffset, 0)
                data = np.zeros(outWidth, dtype=np.complex64)
                if swapByte:
                    # big-endian file: read interleaved real/imag floats and
                    # assemble complex samples
                    tmp = np.fromfile(fp, dtype='>f', count=2*rectWidth[i])
                    cJ = np.complex64(1j)
                    data[xs[i]:xe[i]+1] = tmp[0::2] + cJ * tmp[1::2]
                else:
                    data[xs[i]:xe[i]+1] = np.fromfile(fp, dtype=np.complex64, count=rectWidth[i])
                if phaseCompensation:
                    data *= phaseOffset
                data.astype(np.complex64).tofile(outFp)
    outFp.close()


    #delete files. DO NOT DELETE THE FIRST ONE!!!
    #(rinfs[0] is the original input file, not a temporary)
    for i in range(numberOfFrames):
        if i == 0:
            continue
        os.remove(rinfs[i])
        os.remove(rinfs[i]+'.vrt')
        os.remove(rinfs[i]+'.xml')


    #update frame parameters
    if updateTrack:
        #mosaic size
        track.numberOfSamples = outWidth
        track.numberOfLines = outLength
        #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
        #range parameters
        track.startingRange = frames[0].startingRange + (int(rangeOffsets2[0]) - int(rangeOffsets2[xminIndex])) * numberOfRangeLooks * frames[0].rangePixelSize
        track.rangeSamplingRate = frames[0].rangeSamplingRate
        track.rangePixelSize = frames[0].rangePixelSize
        #azimuth parameters
        track.sensingStart = frames[0].sensingStart
        track.prf = frames[0].prf
        track.azimuthPixelSize = frames[0].azimuthPixelSize
        track.azimuthLineInterval = frames[0].azimuthLineInterval

    if phaseCompensation:
        # estimated phase diff, used phase diff, used phase diff source
        return (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples)
||||
|
||||
|
||||
def frameMosaicParameters(track, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks):
    '''
    mosaic frames (simplified version of frameMosaic that only updates parameters)

    track:                track object; its mosaic parameters are updated in place
    rangeOffsets:         range offsets between frames (at original single-look size)
    azimuthOffsets:       azimuth offsets between frames (at original single-look size)
    numberOfRangeLooks:   number of range looks of the input files
    numberOfAzimuthLooks: number of azimuth looks of the input files
    '''
    numberOfFrames = len(track.frames)
    frames = track.frames

    rectWidth = [frame.numberOfSamples for frame in frames]
    rectLength = [frame.numberOfLines for frame in frames]

    #convert original offsets to offsets for images with looks
    #use list instead of np.array to make it consistent with the rest of the code
    rangeOffsets1 = [x / numberOfRangeLooks for x in rangeOffsets]
    azimuthOffsets1 = [x / numberOfAzimuthLooks for x in azimuthOffsets]

    #get cumulative offset relative to the first frame
    #(running sum; same summation order as the original O(n^2) double loop,
    # so the floating-point results are identical)
    rangeOffsets2 = [0.0]
    azimuthOffsets2 = [0.0]
    for i in range(1, numberOfFrames):
        rangeOffsets2.append(rangeOffsets2[i-1] + rangeOffsets1[i])
        azimuthOffsets2.append(azimuthOffsets2[i-1] + azimuthOffsets1[i])

    #determine output extent of each frame in mosaic coordinates
    #(actually no need to calculate in azimuth direction, kept for symmetry)
    xs = []
    xe = []
    ys = []
    ye = []
    for i in range(numberOfFrames):
        if i == 0:
            xs.append(0)
            xe.append(rectWidth[i] - 1)
            ys.append(0)
            ye.append(rectLength[i] - 1)
        else:
            xs.append(0 - int(rangeOffsets2[i]))
            xe.append(rectWidth[i] - 1 - int(rangeOffsets2[i]))
            ys.append(0 - int(azimuthOffsets2[i]))
            ye.append(rectLength[i] - 1 - int(azimuthOffsets2[i]))

    (xmin, xminIndex) = min((v, i) for i, v in enumerate(xs))
    (xmax, xmaxIndex) = max((v, i) for i, v in enumerate(xe))
    (ymin, yminIndex) = min((v, i) for i, v in enumerate(ys))
    (ymax, ymaxIndex) = max((v, i) for i, v in enumerate(ye))

    outWidth = xmax - xmin + 1
    outLength = ymax - ymin + 1

    #update track parameters
    #mosaic size
    track.numberOfSamples = outWidth
    track.numberOfLines = outLength
    #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
    #range parameters
    track.startingRange = frames[0].startingRange + (int(rangeOffsets2[0]) - int(rangeOffsets2[xminIndex])) * numberOfRangeLooks * frames[0].rangePixelSize
    track.rangeSamplingRate = frames[0].rangeSamplingRate
    track.rangePixelSize = frames[0].rangePixelSize
    #azimuth parameters
    track.sensingStart = frames[0].sensingStart
    track.prf = frames[0].prf
    track.azimuthPixelSize = frames[0].azimuthPixelSize
    track.azimuthLineInterval = frames[0].azimuthLineInterval
|
||||
|
||||
|
||||
def readImageFromVrt(inputfile, startSample, endSample, startLine, endLine):
    '''
    Read a chunk of a complex image described by a VRT file.

    The indexes (startSample, endSample, startLine, endLine) are inclusive
    and start with zero.

    memmap is not used, because it is much slower.

    tested against readImage in runSwathMosaic.py
    '''
    import os
    from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_keyword
    from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_file

    inputimage = find_vrt_file(inputfile+'.vrt', 'SourceFilename', relative_path=True)
    #anything other than little-endian ('LSB') requires a byte swap on read
    swapByte = (find_vrt_keyword(inputfile+'.vrt', 'ByteOrder') != 'LSB')
    imageoffset = int(find_vrt_keyword(inputfile+'.vrt', 'ImageOffset'))
    lineoffset = int(find_vrt_keyword(inputfile+'.vrt', 'LineOffset'))

    width = endSample - startSample + 1
    length = endLine - startLine + 1
    data = np.zeros((length, width), dtype=np.complex64)
    with open(inputimage, 'rb') as fp:
        for line in range(startLine, endLine+1):
            #8 bytes per complex64 sample
            fp.seek(imageoffset + line*lineoffset + startSample*8, 0)
            if swapByte:
                #read big-endian float pairs and assemble complex values
                raw = np.fromfile(fp, dtype='>f', count=2*width)
                data[line-startLine] = raw[0::2] + np.complex64(1j) * raw[1::2]
            else:
                data[line-startLine] = np.fromfile(fp, dtype=np.complex64, count=width)
    return data
|
||||
|
|
@ -0,0 +1,290 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
import isceobj
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runFrameOffset')
|
||||
|
||||
def runFrameOffset(self):
    '''estimate frame offsets.
    '''
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    mosaicDir = 'insar'
    os.makedirs(mosaicDir, exist_ok=True)
    os.chdir(mosaicDir)

    #single-frame tracks need no offset estimation
    if len(referenceTrack.frames) > 1:
        #ScanSAR-involved mode combinations use full-aperture matching (mode 0)
        if self._insar.modeCombination in (21, 22, 31, 32):
            matchingMode = 0
        else:
            matchingMode = 1

        #compute frame offset of reference, optionally with cross-correlation
        offsetReference = frameOffset(referenceTrack, self._insar.referenceSlc, self._insar.referenceFrameOffset,
                                      crossCorrelation=self.frameOffsetMatching, matchingMode=matchingMode)
        #only use geometrical offset for secondary
        offsetSecondary = frameOffset(secondaryTrack, self._insar.secondarySlc, self._insar.secondaryFrameOffset,
                                      crossCorrelation=False, matchingMode=matchingMode)

        self._insar.frameRangeOffsetGeometricalReference = offsetReference[0]
        self._insar.frameAzimuthOffsetGeometricalReference = offsetReference[1]
        self._insar.frameRangeOffsetGeometricalSecondary = offsetSecondary[0]
        self._insar.frameAzimuthOffsetGeometricalSecondary = offsetSecondary[1]
        if self.frameOffsetMatching:
            self._insar.frameRangeOffsetMatchingReference = offsetReference[2]
            self._insar.frameAzimuthOffsetMatchingReference = offsetReference[3]
            #self._insar.frameRangeOffsetMatchingSecondary = offsetSecondary[2]
            #self._insar.frameAzimuthOffsetMatchingSecondary = offsetSecondary[3]

    os.chdir('../')

    catalog.printToLog(logger, "runFrameOffset")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def frameOffset(track, image, outputfile, crossCorrelation=True, matchingMode=0):
    '''
    compute frame offset
    track:            track object
    image:            image for doing matching
    outputfile:       output txt file for saving frame offset
    crossCorrelation: whether do matching
    matchingMode:     how to match images. 0: ScanSAR full-aperture image, 1: regular image
    '''
    rangeOffsetGeometrical = []
    azimuthOffsetGeometrical = []
    rangeOffsetMatching = []
    azimuthOffsetMatching = []

    for j, frame in enumerate(track.frames):
        frameNumber = frame.frameNumber
        swathNumber = frame.swaths[0].swathNumber
        swathDir = 'f{}_{}/s{}'.format(j+1, frameNumber, swathNumber)

        print('estimate offset frame {}'.format(frameNumber))

        #the first frame is the reference: its offsets are zero by definition
        if j == 0:
            rangeOffsetGeometrical.append(0.0)
            azimuthOffsetGeometrical.append(0.0)
            rangeOffsetMatching.append(0.0)
            azimuthOffsetMatching.append(0.0)
            swathDirLast = swathDir
            continue

        image1 = os.path.join('../', swathDirLast, image)
        image2 = os.path.join('../', swathDir, image)
        swath1 = track.frames[j-1].swaths[0]
        swath2 = track.frames[j].swaths[0]

        #offset from geometry
        offsetGeometrical = computeFrameOffset(swath1, swath2)
        rangeOffsetGeometrical.append(offsetGeometrical[0])
        azimuthOffsetGeometrical.append(offsetGeometrical[1])

        #offset from cross-correlation
        if crossCorrelation:
            offsetMatching = estimateFrameOffset(swath1, swath2, image1, image2, matchingMode=matchingMode)
            if offsetMatching is not None:
                rangeOffsetMatching.append(offsetMatching[0])
                azimuthOffsetMatching.append(offsetMatching[1])
            else:
                #fall back to the geometrical offset when matching fails
                print('******************************************************************')
                print('WARNING: bad matching offset, we are forced to use')
                print(' geometrical offset for frame mosaicking')
                print('******************************************************************')
                rangeOffsetMatching.append(offsetGeometrical[0])
                azimuthOffsetMatching.append(offsetGeometrical[1])

        swathDirLast = swathDir

    if crossCorrelation:
        #build a comparison report of geometrical vs matched offsets
        offsetComp = "\n\ncomparision of offsets:\n\n"
        offsetComp += "offset type i geometrical match difference\n"
        offsetComp += "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n"
        for i, (offset1, offset2) in enumerate(zip(rangeOffsetGeometrical, rangeOffsetMatching)):
            offsetComp += "range offset {:2d} {:13.3f} {:13.3f} {:13.3f}\n".format(i, offset1, offset2, offset1 - offset2)
        for i, (offset1, offset2) in enumerate(zip(azimuthOffsetGeometrical, azimuthOffsetMatching)):
            offsetComp += "azimuth offset {:2d} {:13.3f} {:13.3f} {:13.3f}\n".format(i, offset1, offset2, offset1 - offset2)

        #write and report offsets
        with open(outputfile, 'w') as f:
            f.write(offsetComp)
        print("{}".format(offsetComp))

    if crossCorrelation:
        return (rangeOffsetGeometrical, azimuthOffsetGeometrical, rangeOffsetMatching, azimuthOffsetMatching)
    else:
        return (rangeOffsetGeometrical, azimuthOffsetGeometrical)
|
||||
|
||||
|
||||
def computeFrameOffset(swath1, swath2):
    '''
    Compute the geometrical offset between two adjacent frames.

    Offsets are those of swath2 relative to swath1, in swath1 range pixels
    and azimuth lines; the sign convention matches the matching offsets
    used by frameOffset().
    '''
    deltaRange = swath2.startingRange - swath1.startingRange
    deltaTime = (swath2.sensingStart - swath1.sensingStart).total_seconds()

    rangeOffset = -deltaRange / swath1.rangePixelSize
    azimuthOffset = -deltaTime / swath1.azimuthLineInterval

    return (rangeOffset, azimuthOffset)
|
||||
|
||||
|
||||
def estimateFrameOffset(swath1, swath2, image1, image2, matchingMode=0):
    '''
    estimate offset of two adjacent frames using matching
    matchingMode: 0: ScanSAR full-aperture image
                  1: regular image
    '''
    import isceobj
    from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsets
    from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsetsRoipac
    from isceobj.Alos2Proc.Alos2ProcPublic import meanOffset
    from mroipac.ampcor.Ampcor import Ampcor

    ##########################################
    #2. match using ampcor
    ##########################################
    ampcor = Ampcor(name='insarapp_slcs_ampcor')
    ampcor.configure()

    #reference image
    mSLC = isceobj.createImage()
    mSLC.load(image1+'.xml')
    mSLC.setFilename(image1)
    mSLC.setAccessMode('read')
    mSLC.createImage()

    #secondary image
    sSLC = isceobj.createImage()
    sSLC.load(image2+'.xml')
    sSLC.setFilename(image2)
    sSLC.setAccessMode('read')
    sSLC.createImage()

    dataType = mSLC.dataType.upper()
    if dataType == 'CFLOAT':
        ampcor.setImageDataType1('complex')
        ampcor.setImageDataType2('complex')
    elif dataType == 'FLOAT':
        ampcor.setImageDataType1('real')
        ampcor.setImageDataType2('real')
    else:
        raise Exception('file type not supported yet.')

    ampcor.setReferenceSlcImage(mSLC)
    ampcor.setSecondarySlcImage(sSLC)

    #MATCH REGION
    #compute an offset at image center to use as the gross offset
    rgoff = int(-(swath2.startingRange - swath1.startingRange) / swath1.rangePixelSize)
    azoff = int(-((swath2.sensingStart - swath1.sensingStart).total_seconds()) / swath1.azimuthLineInterval)
    #it seems that we cannot use 0, haven't look into the problem
    if rgoff == 0:
        rgoff = 1
    if azoff == 0:
        azoff = 1
    #shift the first sample/line so the search window stays inside the image
    firstSample = 1 if rgoff >= 0 else int(35 - rgoff)
    firstLine = 1 if azoff >= 0 else int(35 - azoff)
    ampcor.setAcrossGrossOffset(rgoff)
    ampcor.setDownGrossOffset(azoff)
    ampcor.setFirstSampleAcross(firstSample)
    ampcor.setLastSampleAcross(mSLC.width)
    ampcor.setNumberLocationAcross(30)
    ampcor.setFirstSampleDown(firstLine)
    ampcor.setLastSampleDown(mSLC.length)
    ampcor.setNumberLocationDown(10)

    #MATCH PARAMETERS
    if matchingMode == 0:
        #full-aperture mode
        ampcor.setWindowSizeWidth(64)
        ampcor.setWindowSizeHeight(512)
        #note this is the half width/length of search area, number of resulting correlation samples: 32*2+1
        ampcor.setSearchWindowSizeWidth(32)
        ampcor.setSearchWindowSizeHeight(32)
        #triggering full-aperture mode matching
        ampcor.setWinsizeFilt(8)
        ampcor.setOversamplingFactorFilt(64)
    else:
        #regular mode
        ampcor.setWindowSizeWidth(64)
        ampcor.setWindowSizeHeight(64)
        ampcor.setSearchWindowSizeWidth(32)
        ampcor.setSearchWindowSizeHeight(32)

    #REST OF THE STUFF
    ampcor.setAcrossLooks(1)
    ampcor.setDownLooks(1)
    ampcor.setOversamplingFactor(64)
    ampcor.setZoomWindowSize(16)
    #scale factors (x/y) and SNR/covariance thresholds are intentionally
    #left at Ampcor defaults (1. 1. and 0.001 1000.0 respectively);
    #the thresholds are the only settings that differ from ROI_PAC
    ampcor.setDebugFlag(False)
    ampcor.setDisplayFlag(False)

    #run ampcor
    ampcor.ampcor()
    offsets = ampcor.getOffsetField()
    #ampcorOffsetFile = 'ampcor.off'
    #writeOffset(offsets, ampcorOffsetFile)

    #finalize images; otherwise the file pointer is still at the end of the image
    mSLC.finalizeImage()
    sSLC.finalizeImage()

    #############################################
    #3. cull offsets
    #############################################
    #refinedOffsets = cullOffsets(offsets)
    refinedOffsets = cullOffsetsRoipac(offsets, numThreshold=50)

    if refinedOffsets is not None:
        #meanOffset returns (rangeOffset, azimuthOffset)
        return meanOffset(refinedOffsets)
    else:
        return None
|
||||
|
|
@ -0,0 +1,193 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
import isceobj
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runGeo2Rdr')
|
||||
|
||||
def runGeo2Rdr(self):
    '''compute range and azimuth offsets
    '''
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    secondaryTrack = self._insar.loadTrack(reference=False)

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    #use the GPU implementation only when requested and actually available
    geo2Rdr = geo2RdrGPU if (self.useGPU and self._insar.hasGPU()) else geo2RdrCPU
    geo2Rdr(secondaryTrack, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
            self._insar.latitude, self._insar.longitude, self._insar.height,
            self._insar.rangeOffset, self._insar.azimuthOffset)

    os.chdir('../')

    catalog.printToLog(logger, "runGeo2Rdr")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def geo2RdrCPU(secondaryTrack, numberRangeLooks, numberAzimuthLooks, latFile, lonFile, hgtFile, rangeOffsetFile, azimuthOffsetFile):
    '''
    Compute range/azimuth offset fields with the CPU geo2rdr.

    secondaryTrack:     secondary track object
    numberRangeLooks:   number of range looks of the multilooked geometry
    numberAzimuthLooks: number of azimuth looks of the multilooked geometry
    latFile/lonFile/hgtFile: input geometry files (xml metadata expected)
    rangeOffsetFile/azimuthOffsetFile: output offset file names
    '''
    import datetime
    from zerodop.geo2rdr import createGeo2rdr
    from isceobj.Planet.Planet import Planet

    pointingDirection = {'right': -1, 'left': 1}

    def loadInput(creator, filename):
        #load an existing image description and open it read-only
        img = creator()
        img.load(filename + '.xml')
        img.setAccessMode('read')
        return img

    latImage = loadInput(isceobj.createImage, latFile)
    lonImage = loadInput(isceobj.createImage, lonFile)
    demImage = loadInput(isceobj.createDemImage, hgtFile)

    planet = Planet(pname='Earth')

    g2r = createGeo2rdr()
    g2r.configure()
    #set parameters
    g2r.slantRangePixelSpacing = numberRangeLooks * secondaryTrack.rangePixelSize
    g2r.prf = 1.0 / (numberAzimuthLooks*secondaryTrack.azimuthLineInterval)
    g2r.radarWavelength = secondaryTrack.radarWavelength
    g2r.orbit = secondaryTrack.orbit
    g2r.width = secondaryTrack.numberOfSamples
    g2r.length = secondaryTrack.numberOfLines
    g2r.demLength = demImage.length
    g2r.demWidth = demImage.width
    g2r.wireInputPort(name='planet', object=planet)
    #looks are already folded into the spacing/prf above, so these must be 1
    g2r.numberRangeLooks = 1
    g2r.numberAzimuthLooks = 1
    g2r.lookSide = pointingDirection[secondaryTrack.pointingDirection]
    #shift start time/range to the center of the first multilooked pixel
    g2r.setSensingStart(secondaryTrack.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*secondaryTrack.azimuthLineInterval))
    g2r.rangeFirstSample = secondaryTrack.startingRange + (numberRangeLooks-1.0)/2.0*secondaryTrack.rangePixelSize
    g2r.dopplerCentroidCoeffs = [0.] # we are using zero doppler geometry
    #set files
    g2r.latImage = latImage
    g2r.lonImage = lonImage
    g2r.demImage = demImage
    g2r.rangeOffsetImageName = rangeOffsetFile
    g2r.azimuthOffsetImageName = azimuthOffsetFile
    #run it
    g2r.geo2rdr()

    return
|
||||
|
||||
|
||||
def geo2RdrGPU(secondaryTrack, numberRangeLooks, numberAzimuthLooks, latFile, lonFile, hgtFile, rangeOffsetFile, azimuthOffsetFile):
    '''
    currently we cannot set left/right looking.
    works for right looking, but left looking probably not supported.
    '''
    import datetime
    from zerodop.GPUgeo2rdr.GPUgeo2rdr import PyGeo2rdr
    from isceobj.Planet.Planet import Planet
    from iscesys import DateTimeUtil as DTU

    def openInput(filename):
        #load an existing image description and open it for reading
        img = isceobj.createImage()
        img.load(filename + '.xml')
        img.setAccessMode('READ')
        img.createImage()
        return img

    latImage = openInput(latFile)
    lonImage = openInput(lonFile)
    demImage = openInput(hgtFile)

    #####Run Geo2rdr
    planet = Planet(pname='Earth')
    g2r = PyGeo2rdr()

    g2r.setRangePixelSpacing(numberRangeLooks * secondaryTrack.rangePixelSize)
    g2r.setPRF(1.0 / (numberAzimuthLooks*secondaryTrack.azimuthLineInterval))
    g2r.setRadarWavelength(secondaryTrack.radarWavelength)

    #CHECK IF THIS WORKS!!!
    g2r.createOrbit(0, len(secondaryTrack.orbit.stateVectors.list))
    for count, sv in enumerate(secondaryTrack.orbit.stateVectors.list):
        td = DTU.seconds_since_midnight(sv.getTime())
        pos = sv.getPosition()
        vel = sv.getVelocity()
        g2r.setOrbitVector(count, td, pos[0], pos[1], pos[2], vel[0], vel[1], vel[2])

    g2r.setOrbitMethod(0)
    g2r.setWidth(secondaryTrack.numberOfSamples)
    g2r.setLength(secondaryTrack.numberOfLines)
    #shift start time/range to the center of the first multilooked pixel
    g2r.setSensingStart(DTU.seconds_since_midnight(secondaryTrack.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*secondaryTrack.azimuthLineInterval)))
    g2r.setRangeFirstSample(secondaryTrack.startingRange + (numberRangeLooks-1.0)/2.0*secondaryTrack.rangePixelSize)
    g2r.setNumberRangeLooks(1)
    g2r.setNumberAzimuthLooks(1)
    g2r.setEllipsoidMajorSemiAxis(planet.ellipsoid.a)
    g2r.setEllipsoidEccentricitySquared(planet.ellipsoid.e2)

    #zero-doppler polynomial
    g2r.createPoly(0, 0., 1.)
    g2r.setPolyCoeff(0, 0.)

    g2r.setDemLength(demImage.getLength())
    g2r.setDemWidth(demImage.getWidth())
    g2r.setBistaticFlag(0)

    def openOutput(filename):
        #FLOAT image on disk, written from DOUBLE values via a caster
        img = isceobj.createImage()
        img.setFilename(filename)
        img.setAccessMode('write')
        img.setDataType('FLOAT')
        img.setCaster('write', 'DOUBLE')
        img.setWidth(demImage.width)
        img.createImage()
        return img

    rangeOffsetImage = openOutput(rangeOffsetFile)
    azimuthOffsetImage = openOutput(azimuthOffsetFile)

    g2r.setLatAccessor(latImage.getImagePointer())
    g2r.setLonAccessor(lonImage.getImagePointer())
    g2r.setHgtAccessor(demImage.getImagePointer())
    g2r.setAzAccessor(0)
    g2r.setRgAccessor(0)
    g2r.setAzOffAccessor(azimuthOffsetImage.getImagePointer())
    g2r.setRgOffAccessor(rangeOffsetImage.getImagePointer())

    g2r.geo2rdr()

    rangeOffsetImage.finalizeImage()
    rangeOffsetImage.renderHdr()

    azimuthOffsetImage.finalizeImage()
    azimuthOffsetImage.renderHdr()
    latImage.finalizeImage()
    lonImage.finalizeImage()
    demImage.finalizeImage()

    return
|
||||
|
|
@ -0,0 +1,130 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runGeocode')
|
||||
|
||||
def runGeocode(self):
    '''geocode final products
    '''
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    #secondaryTrack = self._insar.loadTrack(reference=False)

    demFile = os.path.abspath(self._insar.demGeo)

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    #compute bounding box for geocoding
    bbox = getBboxGeo(referenceTrack) if self.bbox is None else self.bbox
    catalog.addItem('geocode bounding box', bbox, 'runGeocode')

    if self.geocodeList is None:
        #default product list
        geocodeList = [self._insar.unwrappedInterferogram,
                       self._insar.unwrappedMaskedInterferogram,
                       self._insar.multilookCoherence,
                       self._insar.multilookLos]
        if self.doIon:
            geocodeList.append(self._insar.multilookIon)
    else:
        #expand user-specified glob patterns
        geocodeList = []
        for pattern in self.geocodeList:
            geocodeList += glob.glob(pattern)

    numberRangeLooks = self._insar.numberRangeLooks1 * self._insar.numberRangeLooks2
    numberAzimuthLooks = self._insar.numberAzimuthLooks1 * self._insar.numberAzimuthLooks2

    for inputFile in geocodeList:
        if self.geocodeInterpMethod is None:
            #sinc for complex data, bilinear for everything else
            img = isceobj.createImage()
            img.load(inputFile + '.xml')
            interpMethod = 'sinc' if img.dataType.upper() == 'CFLOAT' else 'bilinear'
        else:
            interpMethod = self.geocodeInterpMethod.lower()

        geocode(referenceTrack, demFile, inputFile, bbox, numberRangeLooks, numberAzimuthLooks, interpMethod, 0, 0)

    os.chdir('../')

    catalog.printToLog(logger, "runGeocode")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def geocode(track, demFile, inputFile, bbox, numberRangeLooks, numberAzimuthLooks, interpMethod, topShift, leftShift, addMultilookOffset=True):
    '''
    Geocode one radar-coordinate product onto the DEM grid.

    track:       reference track object providing the radar geometry
    demFile:     DEM file (xml metadata expected)
    inputFile:   radar-coordinate product to geocode (xml metadata expected)
    bbox:        snwe bounding box list
    numberRangeLooks/numberAzimuthLooks: looks of the input product
    interpMethod: interpolation method name for geozero
    topShift/leftShift: extra line/sample shifts of the product origin
    addMultilookOffset: also shift to multilooked pixel centers
    '''
    import datetime
    from zerodop.geozero import createGeozero
    from isceobj.Planet.Planet import Planet

    pointingDirection = {'right': -1, 'left': 1}

    demImage = isceobj.createDemImage()
    demImage.load(demFile + '.xml')
    demImage.setAccessMode('read')

    inImage = isceobj.createImage()
    inImage.load(inputFile + '.xml')
    inImage.setAccessMode('read')

    planet = Planet(pname='Earth')

    geo = createGeozero()
    geo.configure()
    geo.slantRangePixelSpacing = numberRangeLooks * track.rangePixelSize
    geo.prf = 1.0 / (numberAzimuthLooks*track.azimuthLineInterval)
    geo.radarWavelength = track.radarWavelength
    geo.orbit = track.orbit
    geo.width = inImage.width
    geo.length = inImage.length
    geo.wireInputPort(name='dem', object=demImage)
    geo.wireInputPort(name='planet', object=planet)
    geo.wireInputPort(name='tobegeocoded', object=inImage)
    geo.numberRangeLooks = 1
    geo.numberAzimuthLooks = 1
    geo.lookSide = pointingDirection[track.pointingDirection]

    #apply the requested shifts; optionally move to multilooked pixel centers
    sensingStart = track.sensingStart + datetime.timedelta(seconds=topShift*track.azimuthLineInterval)
    rangeFirstSample = track.startingRange + leftShift * track.rangePixelSize
    if addMultilookOffset:
        sensingStart += datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*track.azimuthLineInterval)
        rangeFirstSample += (numberRangeLooks-1.0)/2.0*track.rangePixelSize
    geo.setSensingStart(sensingStart)
    geo.rangeFirstSample = rangeFirstSample

    geo.method = interpMethod
    geo.demCropFilename = 'crop.dem'
    #looks like this does not work
    #geo.geoFilename = outputName
    geo.dopplerCentroidCoeffs = [0.]
    #snwe list <class 'list'>
    geo.snwe = bbox

    geo.geocode()

    print('South: ', geo.minimumGeoLatitude)
    print('North: ', geo.maximumGeoLatitude)
    print('West: ', geo.minimumGeoLongitude)
    print('East: ', geo.maximumGeoLongitude)

    return
|
||||
|
|
@ -0,0 +1,63 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.runGeocode import geocode
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runGeocodeOffset')
|
||||
|
||||
def runGeocodeOffset(self):
    '''geocode offset field
    '''
    #only applies to dense-offset processing of regular mode combinations
    if not self.doDenseOffset:
        return
    if self._insar.modeCombination not in (0, 1):
        return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    #use original track object to determine bbox
    if self.bbox is None:
        referenceTrack = self._insar.loadTrack(reference=True)
        bbox = getBboxGeo(referenceTrack)
    else:
        bbox = self.bbox
    catalog.addItem('geocode bounding box', bbox, 'runGeocodeOffset')

    demFile = os.path.abspath(self._insar.demGeo)

    denseOffsetDir = 'dense_offset'
    os.makedirs(denseOffsetDir, exist_ok=True)
    os.chdir(denseOffsetDir)

    referenceTrack = self._insar.loadProduct(self._insar.referenceTrackParameter)
    #secondaryTrack = self._insar.loadProduct(self._insar.secondaryTrackParameter)

    #########################################################################################
    geocodeList = [self._insar.denseOffset, self._insar.denseOffsetSnr]
    if self.doOffsetFiltering:
        geocodeList.append(self._insar.denseOffsetFilt)

    for inputFile in geocodeList:
        #offset values must not be blended across pixels
        interpMethod = 'nearest'
        geocode(referenceTrack, demFile, inputFile, bbox, self.offsetSkipWidth, self.offsetSkipHeight, interpMethod, self._insar.offsetImageTopoffset, self._insar.offsetImageLeftoffset, addMultilookOffset=False)
    #########################################################################################

    os.chdir('../')
    catalog.printToLog(logger, "runGeocodeOffset")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
|
@ -0,0 +1,150 @@
|
|||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
import numpy.matlib
|
||||
|
||||
import isceobj
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runIonCorrect')
|
||||
|
||||
def runIonCorrect(self):
    '''resample original ionosphere and ionospheric correction

    Resamples the filtered ionospheric phase (computed at ionosphere-level
    multilooks) to the final interferogram multilooks, then optionally removes
    it from the multilooked differential interferogram.

    Side effects: changes the current working directory several times (ends
    back at the processing root), creates/renames files under the ion and
    insar directories, and appends items to the processing catalog.
    '''
    # InSAR processing may be globally disabled on the application object.
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    # If ionospheric correction was not requested, still log the (empty) step.
    if not self.doIon:
        catalog.printToLog(logger, "runIonCorrect")
        self._insar.procDoc.addAllFromCatalog(catalog)
        return

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    from isceobj.Alos2Proc.runIonSubband import defineIonDir
    ionDir = defineIonDir()
    subbandPrefix = ['lower', 'upper']

    ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal'])
    os.makedirs(ionCalDir, exist_ok=True)
    os.chdir(ionCalDir)


    ############################################################
    # STEP 3. resample ionospheric phase
    ############################################################
    from contrib.alos2proc_f.alos2proc_f import rect
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
    from scipy.interpolate import interp1d
    import shutil

    #################################################
    #SET PARAMETERS HERE
    #interpolation method: 0 = fortran bilinear rect, 1 = cubic spline (slower, finer)
    interpolationMethod = 1
    #################################################

    print('\ninterpolate ionosphere')

    # File-name suffixes encoding the total number of looks at the
    # ionosphere level (ml2) and the final interferogram level (ml3).
    ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon,
                              self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon)

    ml3 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooks2,
                              self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2)

    ionfiltfile = 'filt_ion'+ml2+'.ion'
    #ionrectfile = 'filt_ion'+ml3+'.ion'
    ionrectfile = self._insar.multilookIon

    # Input (ionosphere-level) image size.
    img = isceobj.createImage()
    img.load(ionfiltfile + '.xml')
    width2 = img.width
    length2 = img.length

    # Output (interferogram-level) image size, taken from the multilooked
    # differential interferogram.
    img = isceobj.createImage()
    img.load(os.path.join('../../', ionDir['insar'], self._insar.multilookDifferentialInterferogram) + '.xml')
    width3 = img.width
    length3 = img.length

    #number of range looks output
    nrlo = self._insar.numberRangeLooks1*self._insar.numberRangeLooks2
    #number of range looks input
    nrli = self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon
    #number of azimuth looks output
    nalo = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooks2
    #number of azimuth looks input
    nali = self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon

    # Resampling is only needed when the two multilook grids differ.
    if (self._insar.numberRangeLooks2 != self._insar.numberRangeLooksIon) or \
       (self._insar.numberAzimuthLooks2 != self._insar.numberAzimuthLooksIon):
        #this should be faster using fortran
        if interpolationMethod == 0:
            rect(ionfiltfile, ionrectfile,
                width2,length2,
                width3,length3,
                nrlo/nrli, 0.0,
                0.0, nalo/nali,
                (nrlo-nrli)/(2.0*nrli),
                (nalo-nali)/(2.0*nali),
                'REAL','Bilinear')
        #finer, but slower method
        else:
            ionfilt = np.fromfile(ionfiltfile, dtype=np.float32).reshape(length2, width2)
            # Cubic interpolation along range (rows), then along azimuth
            # (columns). The index3 mapping converts output pixel centers to
            # input pixel coordinates, including the half-look offset.
            index2 = np.linspace(0, width2-1, num=width2, endpoint=True)
            index3 = np.linspace(0, width3-1, num=width3, endpoint=True) * nrlo/nrli + (nrlo-nrli)/(2.0*nrli)
            ionrect = np.zeros((length3, width3), dtype=np.float32)
            for i in range(length2):
                f = interp1d(index2, ionfilt[i,:], kind='cubic', fill_value="extrapolate")
                ionrect[i, :] = f(index3)

            index2 = np.linspace(0, length2-1, num=length2, endpoint=True)
            index3 = np.linspace(0, length3-1, num=length3, endpoint=True) * nalo/nali + (nalo-nali)/(2.0*nali)
            for j in range(width3):
                f = interp1d(index2, ionrect[0:length2, j], kind='cubic', fill_value="extrapolate")
                ionrect[:, j] = f(index3)
            ionrect.astype(np.float32).tofile(ionrectfile)
            del ionrect
            create_xml(ionrectfile, width3, length3, 'float')

        # Move the resampled product (and metadata) into the insar directory.
        os.rename(ionrectfile, os.path.join('../../insar', ionrectfile))
        os.rename(ionrectfile+'.vrt', os.path.join('../../insar', ionrectfile)+'.vrt')
        os.rename(ionrectfile+'.xml', os.path.join('../../insar', ionrectfile)+'.xml')
        os.chdir('../../insar')
    else:
        # Same multilook grid: just copy and regenerate metadata.
        shutil.copyfile(ionfiltfile, os.path.join('../../insar', ionrectfile))
        os.chdir('../../insar')
        create_xml(ionrectfile, width3, length3, 'float')
    #now we are in 'insar'


    ############################################################
    # STEP 4. correct interferogram
    ############################################################
    from isceobj.Alos2Proc.Alos2ProcPublic import renameFile
    from isceobj.Alos2Proc.Alos2ProcPublic import runCmd

    if self.applyIon:
        print('\ncorrect interferogram')
        # Keep a pristine copy of the uncorrected interferogram; never
        # overwrite it if a previous run already saved it.
        if os.path.isfile(self._insar.multilookDifferentialInterferogramOriginal):
            print('original interferogram: {} is already here, do not rename: {}'.format(self._insar.multilookDifferentialInterferogramOriginal, self._insar.multilookDifferentialInterferogram))
        else:
            print('renaming {} to {}'.format(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal))
            renameFile(self._insar.multilookDifferentialInterferogram, self._insar.multilookDifferentialInterferogramOriginal)

        # Subtract the ionospheric phase: out = a * exp(-j*b).
        cmd = "imageMath.py -e='a*exp(-1.0*J*b)' --a={} --b={} -s BIP -t cfloat -o {}".format(
            self._insar.multilookDifferentialInterferogramOriginal,
            self._insar.multilookIon,
            self._insar.multilookDifferentialInterferogram)
        runCmd(cmd)
    else:
        print('\nionospheric phase estimation finished, but correction of interfeorgram not requested')

    os.chdir('../')

    catalog.printToLog(logger, "runIonCorrect")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
|
@ -0,0 +1,757 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
import numpy.matlib
|
||||
|
||||
import isceobj
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runIonFilt')
|
||||
|
||||
def runIonFilt(self):
    '''compute and filter ionospheric phase

    Computes the raw ionospheric phase from the lower/upper subband unwrapped
    interferograms, then filters it with an adaptive Gaussian filter sized to
    reach a target output standard deviation.

    Side effects: changes the current working directory into (and back out of)
    the ion calibration directory, writes the raw and filtered ionosphere
    products to disk, and appends items to the processing catalog.
    '''
    # InSAR processing may be globally disabled on the application object.
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    # If ionospheric correction was not requested, still log the (empty) step.
    if not self.doIon:
        catalog.printToLog(logger, "runIonFilt")
        self._insar.procDoc.addAllFromCatalog(catalog)
        return

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    from isceobj.Alos2Proc.runIonSubband import defineIonDir
    ionDir = defineIonDir()
    subbandPrefix = ['lower', 'upper']

    ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal'])
    os.makedirs(ionCalDir, exist_ok=True)
    os.chdir(ionCalDir)


    ############################################################
    # STEP 1. compute ionospheric phase
    ############################################################
    from isceobj.Constants import SPEED_OF_LIGHT
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml

    ###################################
    #SET PARAMETERS HERE
    #THESE SHOULD BE GOOD ENOUGH, NO NEED TO SET IN setup(self)
    # coherence threshold / weighting order used for the unwrapping-error
    # adjustment inside computeIonosphere
    corThresholdAdj = 0.97
    corOrderAdj = 20
    ###################################

    print('\ncomputing ionosphere')
    #get files
    ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon,
                              self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon)

    lowerUnwfile = subbandPrefix[0]+ml2+'.unw'
    upperUnwfile = subbandPrefix[1]+ml2+'.unw'
    corfile = 'diff'+ml2+'.cor'

    #use image size from lower unwrapped interferogram
    img = isceobj.createImage()
    img.load(lowerUnwfile + '.xml')
    width = img.width
    length = img.length

    # Files are band-interleaved-by-line (amplitude, phase); take the
    # phase/coherence band (odd rows of the 2*length stack).
    lowerUnw = (np.fromfile(lowerUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    upperUnw = (np.fromfile(upperUnwfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    cor = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    #amp = (np.fromfile(corfile, dtype=np.float32).reshape(length*2, width))[0:length*2:2, :]

    #masked out user-specified areas
    if self.maskedAreasIon != None:
        maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width)
        for area in maskedAreas:
            lowerUnw[area[0]:area[1], area[2]:area[3]] = 0
            upperUnw[area[0]:area[1], area[2]:area[3]] = 0
            cor[area[0]:area[1], area[2]:area[3]] = 0

    #remove possible wired values in coherence
    cor[np.nonzero(cor<0)] = 0.0
    cor[np.nonzero(cor>1)] = 0.0

    #remove water body (wbd == -1 marks water)
    wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width)
    cor[np.nonzero(wbd==-1)] = 0.0

    #remove small values
    cor[np.nonzero(cor<corThresholdAdj)] = 0.0

    #compute ionosphere
    # fl/fu: lower/upper subband center frequencies
    fl = SPEED_OF_LIGHT / self._insar.subbandRadarWavelength[0]
    fu = SPEED_OF_LIGHT / self._insar.subbandRadarWavelength[1]
    adjFlag = 1
    ionos = computeIonosphere(lowerUnw, upperUnw, cor**corOrderAdj, fl, fu, adjFlag, 0)

    #dump ionosphere
    ionfile = 'ion'+ml2+'.ion'
    ionos.astype(np.float32).tofile(ionfile)
    create_xml(ionfile, width, length, 'float')


    ############################################################
    # STEP 2. filter ionospheric phase
    ############################################################
    import scipy.signal as ss

    #################################################
    #SET PARAMETERS HERE
    #fit and filter ionosphere
    fit = self.fitIon
    filt = self.filtIon
    fitAdaptive = self.fitAdaptiveIon
    filtSecondary = self.filtSecondaryIon
    if (fit == False) and (filt == False):
        raise Exception('either fit ionosphere or filt ionosphere should be True when doing ionospheric correction\n')

    #filtering window size
    size_max = self.filteringWinsizeMaxIon
    size_min = self.filteringWinsizeMinIon
    size_secondary = self.filteringWinsizeSecondaryIon
    if size_min > size_max:
        print('\n\nWARNING: minimum window size for filtering ionosphere phase {} > maximum window size {}'.format(size_min, size_max))
        print('         re-setting maximum window size to {}\n\n'.format(size_min))
        size_max = size_min
    if size_secondary % 2 != 1:
        size_secondary += 1
        print('window size of secondary filtering of ionosphere phase should be odd, window size changed to {}'.format(size_secondary))

    #coherence threshold for fitting a polynomial
    corThresholdFit = 0.25

    #ionospheric phase standard deviation after filtering
    if self.filterStdIon is not None:
        std_out0 = self.filterStdIon
    else:
        # No user value: derive the target std from the acquisition mode, or
        # from a polynomial in range bandwidth for mixed-mode pairs.
        if referenceTrack.operationMode == secondaryTrack.operationMode:
            from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict
            std_out0 = modeProcParDict['ALOS-2'][referenceTrack.operationMode]['filterStdIon']
        else:
            from isceobj.Alos2Proc.Alos2ProcPublic import filterStdPolyIon
            std_out0 = np.polyval(filterStdPolyIon, referenceTrack.frames[0].swaths[0].rangeBandwidth/(1e6))
    #std_out0 = 0.1
    #################################################

    print('\nfiltering ionosphere')

    #input files
    ionfile = 'ion'+ml2+'.ion'
    #corfile = 'diff'+ml2+'.cor'
    corLowerfile = subbandPrefix[0]+ml2+'.cor'
    corUpperfile = subbandPrefix[1]+ml2+'.cor'
    #output files
    ionfiltfile = 'filt_ion'+ml2+'.ion'
    stdfiltfile = 'filt_ion'+ml2+'.std'
    windowsizefiltfile = 'filt_ion'+ml2+'.win'

    #read data
    img = isceobj.createImage()
    img.load(ionfile + '.xml')
    width = img.width
    length = img.length

    ion = np.fromfile(ionfile, dtype=np.float32).reshape(length, width)
    corLower = (np.fromfile(corLowerfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    corUpper = (np.fromfile(corUpperfile, dtype=np.float32).reshape(length*2, width))[1:length*2:2, :]
    # Average the two subband coherences; a zero in either marks the pixel invalid.
    cor = (corLower + corUpper) / 2.0
    index = np.nonzero(np.logical_or(corLower==0, corUpper==0))
    cor[index] = 0
    del corLower, corUpper

    #masked out user-specified areas
    if self.maskedAreasIon != None:
        maskedAreas = reformatMaskedAreas(self.maskedAreasIon, length, width)
        for area in maskedAreas:
            ion[area[0]:area[1], area[2]:area[3]] = 0
            cor[area[0]:area[1], area[2]:area[3]] = 0

    #remove possible wired values in coherence
    cor[np.nonzero(cor<0)] = 0.0
    cor[np.nonzero(cor>1)] = 0.0

    #remove water body. Not helpful, just leave it here
    wbd = np.fromfile('wbd'+ml2+'.wbd', dtype=np.int8).reshape(length, width)
    cor[np.nonzero(wbd==-1)] = 0.0


    #1. compute number of looks
    # Average azimuth bandwidth over all frames/swaths used.
    azimuthBandwidth = 0
    for i, frameNumber in enumerate(self._insar.referenceFrames):
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            #azimuthBandwidth += 2270.575 * 0.85
            azimuthBandwidth += referenceTrack.frames[i].swaths[j].azimuthBandwidth
    azimuthBandwidth = azimuthBandwidth / (len(self._insar.referenceFrames)*(self._insar.endingSwath-self._insar.startingSwath+1))

    #azimuth number of looks should also apply to burst mode
    #assume range bandwidth of subband image is 1/3 of orginal range bandwidth, as in runIonSubband.py!!!
    numberOfLooks = referenceTrack.azimuthLineInterval * self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon / (1.0/azimuthBandwidth) *\
                    referenceTrack.frames[0].swaths[0].rangeBandwidth / 3.0 / referenceTrack.rangeSamplingRate * self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon

    #consider also burst characteristics. In ScanSAR-stripmap interferometry, azimuthBandwidth is from referenceTrack (ScanSAR)
    if self._insar.modeCombination in [21, 31]:
        numberOfLooks /= 5.0
    if self._insar.modeCombination in [22, 32]:
        numberOfLooks /= 7.0
    if self._insar.modeCombination in [21]:
        numberOfLooks *= (self._insar.burstSynchronization/100.0)

    #numberOfLooks checked
    print('number of looks to be used for computing subband interferogram standard deviation: {}'.format(numberOfLooks))
    catalog.addItem('number of looks of subband interferograms', numberOfLooks, 'runIonFilt')


    #2. compute standard deviation of the raw ionospheric phase
    #f0 same as in runIonSubband.py!!!
    def ion_std(fl, fu, numberOfLooks, cor):
        '''
        compute standard deviation of ionospheric phase
        fl: lower band center frequency
        fu: upper band center frequency
        cor: coherence, must be numpy array
        '''
        f0 = (fl + fu) / 2.0
        # Cramer-Rao-style phase variance; the (cor==0) term avoids division
        # by zero, and those pixels are zeroed out below.
        interferogramVar = (1.0 - cor**2) / (2.0 * numberOfLooks * cor**2 + (cor==0))
        std = fl*fu/f0/(fu**2-fl**2)*np.sqrt(fu**2*interferogramVar+fl**2*interferogramVar)
        std[np.nonzero(cor==0)] = 0
        return std
    std = ion_std(fl, fu, numberOfLooks, cor)


    #3. compute minimum filter window size for given coherence and standard deviation of filtered ionospheric phase
    # For a table of coherence values, find the smallest (odd) Gaussian
    # window that brings the filtered std down to std_out0.
    cor2 = np.linspace(0.1, 0.9, num=9, endpoint=True)
    std2 = ion_std(fl, fu, numberOfLooks, cor2)
    std_out2 = np.zeros(cor2.size)
    win2 = np.zeros(cor2.size, dtype=np.int32)
    for i in range(cor2.size):
        for size in range(9, 10001, 2):
            #this window must be the same as those used in adaptive_gaussian!!!
            gw = gaussian(size, size/2.0, scale=1.0)
            scale = 1.0 / np.sum(gw / std2[i]**2)
            std_out2[i] = scale * np.sqrt(np.sum(gw**2 / std2[i]**2))
            win2[i] = size
            if std_out2[i] <= std_out0:
                break
    print('if ionospheric phase standard deviation <= {} rad, minimum filtering window size required:'.format(std_out0))
    print('coherence window size')
    print('************************')
    for x, y in zip(cor2, win2):
        print('  %5.2f       %5d'%(x, y))
    print()
    catalog.addItem('coherence value', cor2, 'runIonFilt')
    catalog.addItem('minimum filter window size', win2, 'runIonFilt')


    #4. filter interferogram
    #fit ionosphere
    if fit:
        #prepare weight: inverse variance, zeroed below the coherence threshold
        wgt = std**2
        wgt[np.nonzero(cor<corThresholdFit)] = 0
        index = np.nonzero(wgt!=0)
        wgt[index] = 1.0/(wgt[index])
        #fit a 2nd-order polynomial surface and remove it before filtering
        ion_fit, coeff = polyfit_2d(ion, wgt, 2)
        ion -= ion_fit * (ion!=0)
    #filter the rest of the ionosphere
    if filt:
        (ion_filt, std_out, window_size_out) = adaptive_gaussian(ion, std, size_min, size_max, std_out0, fit=fitAdaptive)
        if filtSecondary:
            # Fixed-size smoothing pass to remove residual artifacts;
            # normalization by the valid-pixel mask keeps edges unbiased.
            print('applying secondary filtering with window size {}'.format(size_secondary))
            g2d = gaussian(size_secondary, size_secondary/2.0, scale=1.0)
            scale = ss.fftconvolve((ion_filt!=0), g2d, mode='same')
            ion_filt = (ion_filt!=0) * ss.fftconvolve(ion_filt, g2d, mode='same') / (scale + (scale==0))

    catalog.addItem('standard deviation of filtered ionospheric phase', std_out0, 'runIonFilt')

    #get final results: add the fitted surface back where filtering kept data
    if (fit == True) and (filt == True):
        ion_final = ion_filt + ion_fit * (ion_filt!=0)
    elif (fit == True) and (filt == False):
        ion_final = ion_fit
    elif (fit == False) and (filt == True):
        ion_final = ion_filt
    else:
        ion_final = ion

    #output results
    ion_final.astype(np.float32).tofile(ionfiltfile)
    create_xml(ionfiltfile, width, length, 'float')
    if filt == True:
        std_out.astype(np.float32).tofile(stdfiltfile)
        create_xml(stdfiltfile, width, length, 'float')
        window_size_out.astype(np.float32).tofile(windowsizefiltfile)
        create_xml(windowsizefiltfile, width, length, 'float')

    os.chdir('../../')

    catalog.printToLog(logger, "runIonFilt")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
|
||||
def computeIonosphere(lowerUnw, upperUnw, wgt, fl, fu, adjFlag, dispersive):
    '''
    This routine computes ionosphere and remove the relative phase unwrapping errors

    lowerUnw:   lower band unwrapped interferogram (2-d numpy array)
    upperUnw:   upper band unwrapped interferogram (2-d numpy array);
                NOTE: modified in place by the 2-pi adjustment below
    wgt:        weight (2-d numpy array; zero marks invalid samples)
    fl:         lower band center frequency
    fu:         upper band center frequency
    adjFlag:    method for removing relative phase unwrapping errors
                  0: mean value
                  1: polynomial
    dispersive: compute dispersive or non-dispersive
                  0: dispersive
                  1: non-dispersive
    returns:    2-d numpy array with the requested phase component
    '''

    #use image size from lower unwrapped interferogram
    (length, width)=lowerUnw.shape

    # (An older mean-value-only adjustment implementation was removed here;
    # adjFlag == 0 below reproduces it.)
    #adjust phase using mean value
    if adjFlag == 0:
        flag = (lowerUnw!=0)*(wgt!=0)
        index = np.nonzero(flag!=0)
        mv = np.mean((lowerUnw - upperUnw)[index], dtype=np.float64)
        print('mean value of phase difference: {}'.format(mv))
        diff = mv
    #adjust phase using a surface
    else:
        #diff = weight_fitting(lowerUnw - upperUnw, wgt, width, length, 1, 1, 1, 1, 2)
        diff, coeff = polyfit_2d(lowerUnw - upperUnw, wgt, 2)

    flag2 = (lowerUnw!=0)
    index2 = np.nonzero(flag2)
    #phase for adjustment: round the residual band difference to whole cycles
    unwd = ((lowerUnw - upperUnw) - diff)[index2] / (2.0*np.pi)
    unw_adj = np.around(unwd) * (2.0*np.pi)
    #adjust phase of upper band (in place)
    upperUnw[index2] += unw_adj

    unw_diff = (lowerUnw - upperUnw)[index2]
    print('after adjustment:')
    print('max phase difference: {}'.format(np.amax(unw_diff)))
    print('min phase difference: {}'.format(np.amin(unw_diff)))
    print('max-min: {}'.format(np.amax(unw_diff) - np.amin(unw_diff) ))

    #ionosphere
    #fl = SPEED_OF_LIGHT / ionParam.radarWavelengthLower
    #fu = SPEED_OF_LIGHT / ionParam.radarWavelengthUpper
    f0 = (fl + fu) / 2.0

    #dispersive component (standard split-spectrum combination)
    if dispersive == 0:
        ionos = fl * fu * (lowerUnw * fu - upperUnw * fl) / f0 / (fu**2 - fl**2)
    #non-dispersive phase
    else:
        ionos = f0 * (upperUnw*fu - lowerUnw * fl) / (fu**2 - fl**2)

    return ionos
|
||||
|
||||
|
||||
def gaussian(size, sigma, scale = 1.0):
    """Return a normalized 2-D Gaussian kernel of shape (size, size).

    size:  kernel side length; must be odd
    sigma: standard deviation of the 1-D Gaussian
    scale: spacing between sample points along each axis

    The kernel is the outer product of a 1-D Gaussian with itself,
    normalized so its elements sum to 1.
    """
    if size % 2 != 1:
        raise Exception('size must be odd')

    half = (size - 1) / 2
    samples = np.arange(-half, half + 1) * scale
    g1d = np.exp(-samples**2 / (2.0 * sigma**2)) / (sigma * np.sqrt(2.0 * np.pi))

    # outer product == repmat(row) * repmat(column) of the original code
    kernel = np.outer(g1d, g1d)
    return kernel / np.sum(kernel)
|
||||
|
||||
|
||||
def adaptive_gaussian_v0(ionos, wgt, size_max, size_min):
    '''
    This program performs Gaussian filtering with adaptive window size.

    ionos:    ionosphere (2-d numpy array; modified in place by masking)
    wgt:      weight (2-d numpy array; modified in place by masking)
    size_max: maximum window size
    size_min: minimum window size
    returns:  filtered 2-d numpy array

    Strategy: filter with size_num candidate window sizes, estimate the
    resulting standard deviation for each, then per pixel pick the filtered
    value whose std is closest to the mean std over all valid samples.
    '''
    import scipy.signal as ss

    length = (ionos.shape)[0]
    width = (ionos.shape)[1]
    # zero-value samples in either input are treated as invalid everywhere
    flag = (ionos!=0) * (wgt!=0)
    ionos *= flag
    wgt *= flag

    size_num = 100
    size = np.linspace(size_min, size_max, num=size_num, endpoint=True)
    std = np.zeros((length, width, size_num))
    flt = np.zeros((length, width, size_num))
    out = np.zeros((length, width, 1))

    #calculate filterd image and standard deviation
    #sigma of window size: size_max
    sigma = size_max / 2.0
    for i in range(size_num):
        # window size must be odd for gaussian()
        size2 = int(np.around(size[i]))
        if size2 % 2 == 0:
            size2 += 1
        if (i+1) % 10 == 0:
            print('min win: %4d, max win: %4d, current win: %4d'%(int(np.around(size_min)), int(np.around(size_max)), size2))
        g2d = gaussian(size2, sigma*size2/size_max, scale=1.0)
        # weighted Gaussian filtering; (scale==0) guards division by zero
        scale = ss.fftconvolve(wgt, g2d, mode='same')
        flt[:, :, i] = ss.fftconvolve(ionos*wgt, g2d, mode='same') / (scale + (scale==0))
        #variance of resulting filtered sample
        scale = scale**2
        var = ss.fftconvolve(wgt, g2d**2, mode='same') / (scale + (scale==0))
        #in case there is a large area without data where scale is very small, which leads to wired values in variance
        var[np.nonzero(var<0)] = 0
        std[:, :, i] = np.sqrt(var)

    # Replace zero stds with a value guaranteed farther from the mean than
    # any real std, so argmin below never selects an invalid sample.
    std_mv = np.mean(std[np.nonzero(std!=0)], dtype=np.float64)
    diff_max = np.amax(np.absolute(std - std_mv)) + std_mv + 1
    std[np.nonzero(std==0)] = diff_max

    # Per pixel, take the filtered value whose std is closest to the mean std.
    index = np.nonzero(np.ones((length, width))) + ((np.argmin(np.absolute(std - std_mv), axis=2)).reshape(length*width), )
    out = flt[index]
    out = out.reshape((length, width))

    #remove artifacts due to varying wgt
    size_smt = size_min
    if size_smt % 2 == 0:
        size_smt += 1
    g2d = gaussian(size_smt, size_smt/2.0, scale=1.0)
    scale = ss.fftconvolve((out!=0), g2d, mode='same')
    out2 = ss.fftconvolve(out, g2d, mode='same') / (scale + (scale==0))

    return out2
|
||||
|
||||
|
||||
def least_sqares(H, S, W=None):
    '''
    Solve the (optionally weighted) linear least-squares problem H theta = S.

    #This can make use multiple threads (set environment variable: OMP_NUM_THREADS)
    H: observation matrix, shape (n, m)
    S: observation vector, length n
    W: optional weight matrix, shape (n, n)
    returns: solution vector theta as a flat 1-d numpy array

    NOTE(review): the function name keeps its historical misspelling
    ("sqares") because callers elsewhere reference it by this name.
    '''
    # BUGFIX: the original computed S.reshape(H.shape[0], 1) but discarded
    # the result (reshape is not in-place). Assign it so S is explicitly a
    # column vector; the final flatten makes the returned value identical
    # for 1-d inputs either way.
    S = S.reshape(H.shape[0], 1)
    if W is None:
        # ordinary least squares: theta = (H^T H)^-1 H^T S
        #use np.dot instead since some old python versions don't have matmul
        m1 = np.linalg.inv(np.dot(H.transpose(), H))
        Z = np.dot( np.dot(m1, H.transpose()) , S)
    else:
        # weighted least squares: theta = (H^T W H)^-1 H^T W S
        #use np.dot instead since some old python versions don't have matmul
        m1 = np.linalg.inv(np.dot(np.dot(H.transpose(), W), H))
        Z = np.dot(np.dot(np.dot(m1, H.transpose()), W), S)

    return Z.reshape(Z.size)
|
||||
|
||||
|
||||
def polyfit_2d(data, weight, order):
    '''
    fit a surface to a 2-d matrix

    data:   input 2-d data
    weight: corresponding 2-d weight
    order:  polynomial order, must be >= 1

    returns (data_fit, coeff): the fitted surface (same shape as data) and
    the polynomial coefficients.

    zero samples in data and weight are OK.
    '''
    if order < 1:
        raise Exception('order must >= 1!\n')
    if data.shape != weight.shape:
        raise Exception('data and weight must be of same size!\n')

    (length, width) = data.shape
    n = data.size

    # number of polynomial coefficients: constant term plus (i+1) terms for
    # each total degree i in 1..order
    ncoeff = 1 + sum(i + 1 for i in range(1, order + 1))

    # pixel coordinates: y is the row index, x the column index
    y, x = np.indices((length, width))
    x = x.flatten()
    y = y.flatten()
    z = data.flatten()
    w = np.sqrt(weight.flatten())

    # observation matrix H (n x ncoeff) for the linear system H theta = z
    H = np.zeros((n, ncoeff))
    H[:, 0] = 1.0
    col = 1
    for degree in range(1, order + 1):
        for ypow in range(degree + 1):
            # monomial x^(degree-ypow) * y^ypow
            H[:, col] = x**(degree - ypow) * y**ypow
            col += 1

    # weighted least squares via lstsq (robust to singular cases)
    #coeff = least_sqares(H*w[:,None], z*w, W=None)  # threaded alternative
    coeff = np.linalg.lstsq(H * w[:, None], z * w, rcond=-1)[0]

    # evaluate the fitted surface on the full grid
    data_fit = np.dot(H, coeff).reshape(length, width)

    return (data_fit, coeff)
|
||||
|
||||
|
||||
def adaptive_gaussian(data, std, size_min, size_max, std_out0, fit=True):
|
||||
'''
|
||||
This program performs Gaussian filtering with adaptive window size.
|
||||
Cunren Liang, 11-JUN-2020
|
||||
|
||||
data: input raw data, numpy array
|
||||
std: standard deviation of raw data, numpy array
|
||||
size_min: minimum filter window size
|
||||
size_max: maximum filter window size (size_min <= size_max, size_min == size_max is allowed)
|
||||
std_out0: standard deviation of output data
|
||||
fit: whether do fitting before gaussian filtering
|
||||
'''
|
||||
import scipy.signal as ss
|
||||
|
||||
|
||||
(length, width) = data.shape
|
||||
|
||||
#assume zero-value samples are invalid
|
||||
index = np.nonzero(np.logical_or(data==0, std==0))
|
||||
data[index] = 0
|
||||
std[index] = 0
|
||||
#compute weight using standard deviation
|
||||
wgt = 1.0 / (std**2 + (std==0))
|
||||
wgt[index] = 0
|
||||
|
||||
#compute number of gaussian filters
|
||||
if size_min > size_max:
|
||||
raise Exception('size_min: {} > size_max: {}\n'.format(size_min, size_max))
|
||||
|
||||
if size_min % 2 == 0:
|
||||
size_min += 1
|
||||
if size_max % 2 == 0:
|
||||
size_max += 1
|
||||
|
||||
size_num = int((size_max - size_min) / 2 + 1)
|
||||
#'size_num == 1' is checked to be OK starting from here
|
||||
|
||||
|
||||
#create gaussian filters
|
||||
print('compute Gaussian filters\n')
|
||||
gaussian_filters = []
|
||||
for i in range(size_num):
|
||||
size = int(size_min + i * 2)
|
||||
gaussian_filters.append(gaussian(size, size/2.0, scale=1.0))
|
||||
|
||||
|
||||
#compute standard deviation after filtering coresponding to each of gaussian_filters
|
||||
#if value is 0, there is no valid sample in the gaussian window
|
||||
print('compute standard deviation after filtering for each filtering window size')
|
||||
std_filt = np.zeros((length, width, size_num))
|
||||
for i in range(size_num):
|
||||
size = int(size_min + i * 2)
|
||||
print('current window size: %4d, min window size: %4d, max window size: %4d' % (size, size_min, size_max), end='\r', flush=True)
|
||||
#robust zero value detector. non-zero convolution result at least >= 1, so can use 0.5
|
||||
#as threshold to detect zero-value result
|
||||
index = np.nonzero(ss.fftconvolve(wgt!=0, gaussian_filters[i]!=0, mode='same') < 0.5)
|
||||
scale = ss.fftconvolve(wgt, gaussian_filters[i], mode='same')
|
||||
scale[index] = 0
|
||||
#variance of resulting filtered sample
|
||||
var_filt = ss.fftconvolve(wgt, gaussian_filters[i]**2, mode='same') / (scale**2 + (scale==0))
|
||||
var_filt[index] = 0
|
||||
std_filt[:, :, i] = np.sqrt(var_filt)
|
||||
print('\n')
|
||||
|
||||
|
||||
#find gaussian window size (3rd-dimension index of the window size in gaussian_filters)
|
||||
#if value is -1, there is no valid sample in any of the gaussian windows
|
||||
#and therefore no filtering in the next step is needed
|
||||
print('find Gaussian window size to use')
|
||||
gaussian_index = np.zeros((length, width), dtype=np.int32)
|
||||
std_filt2 = np.zeros((length, width))
|
||||
for i in range(length):
|
||||
if (((i+1)%50) == 0):
|
||||
print('processing line %6d of %6d' % (i+1, length), end='\r', flush=True)
|
||||
for j in range(width):
|
||||
if np.sum(std_filt[i, j, :]) == 0:
|
||||
gaussian_index[i, j] = -1
|
||||
else:
|
||||
gaussian_index[i, j] = size_num - 1
|
||||
for k in range(size_num):
|
||||
if (std_filt[i, j, k] != 0) and (std_filt[i, j, k] <= std_out0):
|
||||
gaussian_index[i, j] = k
|
||||
break
|
||||
if gaussian_index[i, j] != -1:
|
||||
std_filt2[i, j] = std_filt[i, j, gaussian_index[i, j]]
|
||||
del std_filt
|
||||
print("processing line %6d of %6d\n" % (length, length))
|
||||
|
||||
|
||||
#adaptive gaussian filtering
|
||||
print('filter image')
|
||||
data_out = np.zeros((length, width))
|
||||
std_out = np.zeros((length, width))
|
||||
window_size_out = np.zeros((length, width), dtype=np.int16)
|
||||
for i in range(length):
|
||||
#if (((i+1)%5) == 0):
|
||||
print('processing line %6d of %6d' % (i+1, length), end='\r', flush=True)
|
||||
for j in range(width):
|
||||
#if value is -1, there is no valid sample in any of the gaussian windows
|
||||
#and therefore no filtering in the next step is needed
|
||||
if gaussian_index[i, j] == -1:
|
||||
continue
|
||||
|
||||
#1. extract data
|
||||
size = int(size_min + gaussian_index[i, j] * 2)
|
||||
size_half = int((size - 1) / 2)
|
||||
window_size_out[i, j] = size
|
||||
|
||||
#index in original data
|
||||
first_line = max(i-size_half, 0)
|
||||
last_line = min(i+size_half, length-1)
|
||||
first_column = max(j-size_half, 0)
|
||||
last_column = min(j+size_half, width-1)
|
||||
length_valid = last_line - first_line + 1
|
||||
width_valid = last_column - first_column + 1
|
||||
|
||||
#index in filter window
|
||||
if first_line == 0:
|
||||
last_line2 = size - 1
|
||||
first_line2 = last_line2 - (length_valid - 1)
|
||||
else:
|
||||
first_line2 = 0
|
||||
last_line2 = first_line2 + (length_valid - 1)
|
||||
if first_column == 0:
|
||||
last_column2 = size - 1
|
||||
first_column2 = last_column2 - (width_valid - 1)
|
||||
else:
|
||||
first_column2 = 0
|
||||
last_column2 = first_column2 + (width_valid - 1)
|
||||
|
||||
#prepare data and weight within the window
|
||||
data_window = np.zeros((size, size))
|
||||
wgt_window = np.zeros((size, size))
|
||||
data_window[first_line2:last_line2+1, first_column2:last_column2+1] = data[first_line:last_line+1, first_column:last_column+1]
|
||||
wgt_window[first_line2:last_line2+1, first_column2:last_column2+1] = wgt[first_line:last_line+1, first_column:last_column+1]
|
||||
#number of valid samples in the filtering window
|
||||
n_valid = np.sum(data_window!=0)
|
||||
|
||||
#2. fit
|
||||
#order, n_coeff = (1, 3)
|
||||
order, n_coeff = (2, 6)
|
||||
if fit:
|
||||
#must have enough samples to do fitting
|
||||
#even if order is 2, n_coeff * 3 is much smaller than size_min*size_min in most cases.
|
||||
if n_valid > n_coeff * 3:
|
||||
#data_fit = weight_fitting(data_window, wgt_window, size, size, 1, 1, 1, 1, order)
|
||||
data_fit, coeff = polyfit_2d(data_window, wgt_window, order)
|
||||
index = np.nonzero(data_window!=0)
|
||||
data_window[index] -= data_fit[index]
|
||||
|
||||
#3. filter
|
||||
wgt_window_2 = wgt_window * gaussian_filters[gaussian_index[i, j]]
|
||||
scale = 1.0/np.sum(wgt_window_2)
|
||||
wgt_window_2 *= scale
|
||||
data_out[i, j] = np.sum(wgt_window_2 * data_window)
|
||||
#std_out[i, j] = scale * np.sqrt(np.sum(wgt_window*(gaussian_filters[gaussian_index[i, j]]**2)))
|
||||
#already computed
|
||||
std_out[i, j] = std_filt2[i, j]
|
||||
#print('std_out[i, j], std_filt2[i, j]', std_out[i, j], std_filt2[i, j])
|
||||
|
||||
#4. add back filtered value
|
||||
if fit:
|
||||
if n_valid > n_coeff * 3:
|
||||
data_out[i, j] += data_fit[size_half, size_half]
|
||||
print('\n')
|
||||
|
||||
return (data_out, std_out, window_size_out)
|
||||
|
||||
|
||||
def reformatMaskedAreas(maskedAreas, length, width):
|
||||
'''
|
||||
reformat masked areas coordinates that are ready to use
|
||||
'maskedAreas' is a 2-D list. Each element in the 2-D list is a four-element list: [firstLine,
|
||||
lastLine, firstColumn, lastColumn], with line/column numbers starting with 1. If one of the
|
||||
four elements is specified with -1, the program will use firstLine/lastLine/firstColumn/
|
||||
lastColumn instead.
|
||||
|
||||
output is a 2-D list containing the corresponding python-list/array-format indexes.
|
||||
'''
|
||||
numberOfAreas = len(maskedAreas)
|
||||
maskedAreasReformated = [[0, length, 0, width] for i in range(numberOfAreas)]
|
||||
|
||||
for i in range(numberOfAreas):
|
||||
if maskedAreas[i][0] != -1:
|
||||
maskedAreasReformated[i][0] = maskedAreas[i][0] - 1
|
||||
if maskedAreas[i][1] != -1:
|
||||
maskedAreasReformated[i][1] = maskedAreas[i][1]
|
||||
if maskedAreas[i][2] != -1:
|
||||
maskedAreasReformated[i][2] = maskedAreas[i][2] - 1
|
||||
if maskedAreas[i][3] != -1:
|
||||
maskedAreasReformated[i][3] = maskedAreas[i][3]
|
||||
if (not (0 <= maskedAreasReformated[i][0] <= length-1)) or \
|
||||
(not (1 <= maskedAreasReformated[i][1] <= length)) or \
|
||||
(not (0 <= maskedAreasReformated[i][2] <= width-1)) or \
|
||||
(not (1 <= maskedAreasReformated[i][3] <= width)) or \
|
||||
(not (maskedAreasReformated[i][1]-maskedAreasReformated[i][0]>=1)) or \
|
||||
(not (maskedAreasReformated[i][3]-maskedAreasReformated[i][2]>=1)):
|
||||
raise Exception('area {} masked out in ionospheric phase estimation not correct'.format(i+1))
|
||||
|
||||
return maskedAreasReformated
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,558 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
import isceobj
|
||||
from isceobj.Constants import SPEED_OF_LIGHT
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runIonSubband')
|
||||
|
||||
def runIonSubband(self):
    '''create subband interferograms

    Splits the range bandwidth into lower/upper subbands (each 1/3 of the full
    bandwidth, centered at -1/3 and +1/3 of the band), forms one interferogram
    per subband, mosaics swaths and frames, and finally produces flattened
    (differential) subband interferograms used for ionospheric phase estimation.

    NOTE(review): this function relies heavily on os.chdir() to thread state
    through the directory tree; the relative paths below assume the exact
    directory layout created in STEP 1 — do not reorder steps.
    '''
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    # nothing to do when ionospheric correction is disabled; still log the (empty) catalog
    if not self.doIon:
        catalog.printToLog(logger, "runIonSubband")
        self._insar.procDoc.addAllFromCatalog(catalog)
        return

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    #using 1/3, 1/3, 1/3 band split
    radarWavelength = referenceTrack.radarWavelength
    rangeBandwidth = referenceTrack.frames[0].swaths[0].rangeBandwidth
    rangeSamplingRate = referenceTrack.frames[0].swaths[0].rangeSamplingRate
    # center wavelengths of the two subbands, shifted by +/- bandwidth/3 in frequency
    radarWavelengthLower = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength - rangeBandwidth / 3.0)
    radarWavelengthUpper = SPEED_OF_LIGHT/(SPEED_OF_LIGHT / radarWavelength + rangeBandwidth / 3.0)
    subbandRadarWavelength = [radarWavelengthLower, radarWavelengthUpper]
    # bandwidth and center frequency normalized by the range sampling rate (filter units)
    subbandBandWidth = [rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate]
    subbandFrequencyCenter = [-rangeBandwidth / 3.0 / rangeSamplingRate, rangeBandwidth / 3.0 / rangeSamplingRate]

    # NOTE(review): subbandPrefix appears unused below; ionDir['subband'] carries the same values
    subbandPrefix = ['lower', 'upper']

    '''
    ionDir = {
        ionDir['swathMosaic'] : 'mosaic',
        ionDir['insar'] : 'insar',
        ionDir['ion'] : 'ion',
        ionDir['subband'] : ['lower', 'upper'],
        ionDir['ionCal'] : 'ion_cal'
    }
    '''
    #define upper level directory names
    ionDir = defineIonDir()


    self._insar.subbandRadarWavelength = subbandRadarWavelength


    ############################################################
    # STEP 1. create directories
    ############################################################
    #create and enter 'ion' directory
    #after finishing each step, we are in this directory
    os.makedirs(ionDir['ion'], exist_ok=True)
    os.chdir(ionDir['ion'])

    #create insar processing directories: <subband>/f<i>_<frame>/s<swath>
    for k in range(2):
        subbandDir = ionDir['subband'][k]
        for i, frameNumber in enumerate(self._insar.referenceFrames):
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
                swathDir = 's{}'.format(swathNumber)
                fullDir = os.path.join(subbandDir, frameDir, swathDir)
                os.makedirs(fullDir, exist_ok=True)

    #create ionospheric phase directory
    os.makedirs(ionDir['ionCal'], exist_ok=True)


    ############################################################
    # STEP 2. create subband interferograms
    ############################################################
    import numpy as np
    import stdproc
    from iscesys.StdOEL.StdOELPy import create_writer
    from isceobj.Alos2Proc.Alos2ProcPublic import readOffset
    from contrib.alos2proc.alos2proc import rg_filter

    for i, frameNumber in enumerate(self._insar.referenceFrames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            swathDir = 's{}'.format(swathNumber)

            #skip this time consuming process, if interferogram already exists
            if os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram)) and \
               os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \
               os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.interferogram+'.xml')) and \
               os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude)) and \
               os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.vrt')) and \
               os.path.isfile(os.path.join(ionDir['subband'][0], frameDir, swathDir, self._insar.amplitude+'.xml')) and \
               os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram)) and \
               os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.vrt')) and \
               os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.interferogram+'.xml')) and \
               os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude)) and \
               os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude+'.vrt')) and \
               os.path.isfile(os.path.join(ionDir['subband'][1], frameDir, swathDir, self._insar.amplitude+'.xml')):
                print('interferogram already exists at swath {}, frame {}'.format(swathNumber, frameNumber))
                continue

            #filter reference and secondary images
            for slcx in [self._insar.referenceSlc, self._insar.secondarySlc]:
                slc = os.path.join('../', frameDir, swathDir, slcx)
                slcLower = os.path.join(ionDir['subband'][0], frameDir, swathDir, slcx)
                slcUpper = os.path.join(ionDir['subband'][1], frameDir, swathDir, slcx)
                # one pass produces both subband SLCs
                # NOTE(review): 257, 2048, 0.1, 0, 0.0 are presumably filter length,
                # block size, edge taper, and start/offset parameters of rg_filter —
                # confirm against the rg_filter signature
                rg_filter(slc, 2,
                    [slcLower, slcUpper],
                    subbandBandWidth,
                    subbandFrequencyCenter,
                    257, 2048, 0.1, 0, 0.0)
            #resample
            for k in range(2):
                os.chdir(os.path.join(ionDir['subband'][k], frameDir, swathDir))
                #recreate xml file to remove the file path
                #can also use fixImageXml.py?
                for x in [self._insar.referenceSlc, self._insar.secondarySlc]:
                    img = isceobj.createSlcImage()
                    img.load(x + '.xml')
                    img.setFilename(x)
                    img.extraFilename = x + '.vrt'
                    img.setAccessMode('READ')
                    img.renderHdr()

                #############################################
                #1. form interferogram
                #############################################
                referenceSwath = referenceTrack.frames[i].swaths[j]
                secondarySwath = secondaryTrack.frames[i].swaths[j]

                # offsets refined in earlier (full-band) processing, reused for the subbands
                refinedOffsets = readOffset(os.path.join('../../../../', frameDir, swathDir, 'cull.off'))
                intWidth = int(referenceSwath.numberOfSamples / self._insar.numberRangeLooks1)
                intLength = int(referenceSwath.numberOfLines / self._insar.numberAzimuthLooks1)
                # NOTE(review): loop variable 'i' is shadowed here by the comprehension;
                # harmless in Python 3 (comprehension has its own scope) but confusing
                dopplerVsPixel = [i/secondarySwath.prf for i in secondarySwath.dopplerVsPixel]

                #reference slc
                mSLC = isceobj.createSlcImage()
                mSLC.load(self._insar.referenceSlc+'.xml')
                mSLC.setAccessMode('read')
                mSLC.createImage()

                #secondary slc
                sSLC = isceobj.createSlcImage()
                sSLC.load(self._insar.secondarySlc+'.xml')
                sSLC.setAccessMode('read')
                sSLC.createImage()

                #interferogram
                interf = isceobj.createIntImage()
                interf.setFilename(self._insar.interferogram)
                interf.setWidth(intWidth)
                interf.setAccessMode('write')
                interf.createImage()

                #amplitdue
                amplitude = isceobj.createAmpImage()
                amplitude.setFilename(self._insar.amplitude)
                amplitude.setWidth(intWidth)
                amplitude.setAccessMode('write')
                amplitude.createImage()

                #create a writer for resamp
                stdWriter = create_writer("log", "", True, filename="resamp.log")
                stdWriter.setFileTag("resamp", "log")
                stdWriter.setFileTag("resamp", "err")
                stdWriter.setFileTag("resamp", "out")


                #set up resampling program now
                #The setting has been compared with resamp_roi's setting in ROI_pac item by item.
                #The two kinds of setting are exactly the same. The number of setting items are
                #exactly the same
                objResamp = stdproc.createResamp()
                objResamp.wireInputPort(name='offsets', object=refinedOffsets)
                objResamp.stdWriter = stdWriter
                objResamp.setNumberFitCoefficients(6)
                objResamp.setNumberRangeBin1(referenceSwath.numberOfSamples)
                objResamp.setNumberRangeBin2(secondarySwath.numberOfSamples)
                objResamp.setStartLine(1)
                objResamp.setNumberLines(referenceSwath.numberOfLines)
                objResamp.setFirstLineOffset(1)
                objResamp.setDopplerCentroidCoefficients(dopplerVsPixel)
                # the only subband-specific setting: use the subband center wavelength
                objResamp.setRadarWavelength(subbandRadarWavelength[k])
                objResamp.setSlantRangePixelSpacing(secondarySwath.rangePixelSize)
                objResamp.setNumberRangeLooks(self._insar.numberRangeLooks1)
                objResamp.setNumberAzimuthLooks(self._insar.numberAzimuthLooks1)
                objResamp.setFlattenWithOffsetFitFlag(0)
                objResamp.resamp(mSLC, sSLC, interf, amplitude)

                #finialize images
                mSLC.finalizeImage()
                sSLC.finalizeImage()
                interf.finalizeImage()
                amplitude.finalizeImage()
                stdWriter.finalize()

                #############################################
                #2. trim amplitude
                #############################################
                #using memmap instead, which should be faster, since we only have a few pixels to change
                amp=np.memmap(self._insar.amplitude, dtype='complex64', mode='r+', shape=(intLength, intWidth))
                # zero any pixel where either band's amplitude is zero ('+' acts as logical OR on the masks)
                index = np.nonzero( (np.real(amp)==0) + (np.imag(amp)==0) )
                amp[index]=0

                #Deletion flushes memory changes to disk before removing the object:
                del amp

                #############################################
                #3. delete subband slcs
                #############################################
                os.remove(self._insar.referenceSlc)
                os.remove(self._insar.referenceSlc + '.vrt')
                os.remove(self._insar.referenceSlc + '.xml')
                os.remove(self._insar.secondarySlc)
                os.remove(self._insar.secondarySlc + '.vrt')
                os.remove(self._insar.secondarySlc + '.xml')

                os.chdir('../../../')


    ############################################################
    # STEP 3. mosaic swaths
    ############################################################
    from isceobj.Alos2Proc.runSwathMosaic import swathMosaic
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml

    for k in range(2):
        os.chdir(ionDir['subband'][k])
        for i, frameNumber in enumerate(self._insar.referenceFrames):
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            os.chdir(frameDir)

            mosaicDir = ionDir['swathMosaic']
            os.makedirs(mosaicDir, exist_ok=True)
            os.chdir(mosaicDir)

            # single-swath (or non-ScanSAR mode combination) case: nothing to mosaic,
            # just move the single swath result into the mosaic directory
            if not (
                   ((self._insar.modeCombination == 21) or \
                    (self._insar.modeCombination == 22) or \
                    (self._insar.modeCombination == 31) or \
                    (self._insar.modeCombination == 32))
                   and
                   (self._insar.endingSwath-self._insar.startingSwath+1 > 1)
               ):
                import shutil
                swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber)

                # if not os.path.isfile(self._insar.interferogram):
                #     os.symlink(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram)
                # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
                # shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
                # if not os.path.isfile(self._insar.amplitude):
                #     os.symlink(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude)
                # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
                # shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')

                os.rename(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram)
                os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
                os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
                os.rename(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude)
                os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
                os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')

                #no need to update frame parameters here
                os.chdir('../')
                #no need to save parameter file here
                os.chdir('../')

                continue

            #choose offsets
            numberOfFrames = len(referenceTrack.frames)
            numberOfSwaths = len(referenceTrack.frames[i].swaths)
            if self.swathOffsetMatching:
                #no need to do this as the API support 2-d list
                #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths)
                #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths)
                rangeOffsets = self._insar.swathRangeOffsetMatchingReference
                azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference

            else:
                #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths)
                #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths)
                rangeOffsets = self._insar.swathRangeOffsetGeometricalReference
                azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference

            rangeOffsets = rangeOffsets[i]
            azimuthOffsets = azimuthOffsets[i]

            #list of input files
            inputInterferograms = []
            inputAmplitudes = []
            #phaseDiff = [None]
            # user-provided swath phase differences, per subband; element 0 (first swath) is meaningless
            swathPhaseDiffIon = [self.swathPhaseDiffLowerIon, self.swathPhaseDiffUpperIon]
            phaseDiff = swathPhaseDiffIon[k]
            if swathPhaseDiffIon[k] is None:
                phaseDiff = None
            else:
                phaseDiff = swathPhaseDiffIon[k][i]
                phaseDiff.insert(0, None)

            for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
                swathDir = 's{}'.format(swathNumber)
                inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram))
                inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude))

                # disabled experiment: derive phase compensation from starting ranges
                if False:
                    #compute phase needed to be compensated using startingRange
                    if j >= 1:
                        #phaseDiffSwath1 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange)/subbandRadarWavelength[k]
                        #phaseDiffSwath2 = -4.0 * np.pi * (referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange)/subbandRadarWavelength[k]
                        phaseDiffSwath1 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
                                          -4.0 * np.pi * secondaryTrack.frames[i].swaths[j-1].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
                        phaseDiffSwath2 = +4.0 * np.pi * referenceTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k]) \
                                          -4.0 * np.pi * secondaryTrack.frames[i].swaths[j].startingRange * (1.0/radarWavelength - 1.0/subbandRadarWavelength[k])
                        if referenceTrack.frames[i].swaths[j-1].startingRange - secondaryTrack.frames[i].swaths[j-1].startingRange == \
                           referenceTrack.frames[i].swaths[j].startingRange - secondaryTrack.frames[i].swaths[j].startingRange:
                            #phaseDiff.append(phaseDiffSwath2 - phaseDiffSwath1)
                            #if reference and secondary versions are all before or after version 2.025 (starting range error < 0.5 m),
                            #it should be OK to do the above.
                            #see results in neom where it meets the above requirement, but there is still phase diff
                            #to be less risky, we do not input values here
                            phaseDiff.append(None)
                        else:
                            phaseDiff.append(None)

            #note that frame parameters are updated after mosaicking, here no need to update parameters
            #mosaic amplitudes
            swathMosaic(referenceTrack.frames[i], inputAmplitudes, self._insar.amplitude,
                rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, resamplingMethod=0)
            #mosaic interferograms
            #These are for ALOS-2, may need to change for ALOS-4!
            phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.179664007520499, 2.6766909968024932, 3.130810857]

            # disabled: alternative fixed values for mixed processing-software versions
            if False:
                if (referenceTrack.frames[i].processingSoftwareVersion == '2.025' and secondaryTrack.frames[i].processingSoftwareVersion == '2.023') or \
                   (referenceTrack.frames[i].processingSoftwareVersion == '2.023' and secondaryTrack.frames[i].processingSoftwareVersion == '2.025'):

                    #               changed value                number of samples to estimate new value            new values estimate area
                    ###########################################################################################################################
                    #     2.6766909968024932-->2.6581660335779866                1808694                               d169-f2850, north CA
                    #     2.179664007520499 -->2.204125866652153                  131120                               d169-f2850, north CA

                    phaseDiffFixed = [0.0, 0.4754024578084084, 0.9509913179406437, 1.4261648478671614, 2.204125866652153, 2.6581660335779866, 3.130810857]

            snapThreshold = 0.2

            #the above preparetions only applies to 'self._insar.modeCombination == 21'
            #looks like it also works for 31 (scansarNominalModes-stripmapModes)
            if self._insar.modeCombination != 21:
                phaseDiff = None
                phaseDiffFixed = None
                snapThreshold = None

            #whether snap for each swath
            if self.swathPhaseDiffSnapIon == None:
                snapSwath = [[True for jjj in range(numberOfSwaths-1)] for iii in range(numberOfFrames)]
            else:
                snapSwath = self.swathPhaseDiffSnapIon
                if len(snapSwath) != numberOfFrames:
                    raise Exception('please specify each frame for parameter: swath phase difference snap to fixed values')
                for iii in range(numberOfFrames):
                    if len(snapSwath[iii]) != (numberOfSwaths-1):
                        raise Exception('please specify correct number of swaths for parameter: swath phase difference snap to fixed values')

            (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram,
                rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=False,
                phaseCompensation=True, phaseDiff=phaseDiff, phaseDiffFixed=phaseDiffFixed, snapThreshold=snapThreshold, snapSwath=snapSwath[i], pcRangeLooks=1, pcAzimuthLooks=4,
                filt=False, resamplingMethod=1)

            #the first item is meaningless for all the following list, so only record the following items
            if phaseDiff == None:
                phaseDiff = [None for iii in range(self._insar.startingSwath, self._insar.endingSwath + 1)]
            catalog.addItem('frame {} {} band swath phase diff input'.format(frameNumber, ionDir['subband'][k]), phaseDiff[1:], 'runIonSubband')
            catalog.addItem('frame {} {} band swath phase diff estimated'.format(frameNumber, ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
            catalog.addItem('frame {} {} band swath phase diff used'.format(frameNumber, ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
            catalog.addItem('frame {} {} band swath phase diff used source'.format(frameNumber, ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
            catalog.addItem('frame {} {} band swath phase diff samples used'.format(frameNumber, ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband')
            #check if there is value around 3.130810857, which may not be stable
            phaseDiffUnstableExist = False
            for xxx in phaseDiffUsed:
                if abs(abs(xxx) - 3.130810857) < 0.2:
                    phaseDiffUnstableExist = True
            catalog.addItem('frame {} {} band swath phase diff unstable exists'.format(frameNumber, ionDir['subband'][k]), phaseDiffUnstableExist, 'runIonSubband')

            create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp')
            create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int')

            #update secondary frame parameters here, here no need to update parameters
            os.chdir('../')
            #save parameter file, here no need to save parameter file
            os.chdir('../')
        os.chdir('../')


    ############################################################
    # STEP 4. mosaic frames
    ############################################################
    from isceobj.Alos2Proc.runFrameMosaic import frameMosaic
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml

    for k in range(2):
        os.chdir(ionDir['subband'][k])

        mosaicDir = ionDir['insar']
        os.makedirs(mosaicDir, exist_ok=True)
        os.chdir(mosaicDir)

        numberOfFrames = len(referenceTrack.frames)
        # single-frame case: nothing to mosaic, move the frame mosaic result up
        if numberOfFrames == 1:
            import shutil
            frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0]))
            # if not os.path.isfile(self._insar.interferogram):
            #     os.symlink(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram)
            # #shutil.copy2() can overwrite
            # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
            # shutil.copy2(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
            # if not os.path.isfile(self._insar.amplitude):
            #     os.symlink(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude)
            # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
            # shutil.copy2(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')

            os.rename(os.path.join('../', frameDir, self._insar.interferogram), self._insar.interferogram)
            os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
            os.rename(os.path.join('../', frameDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
            os.rename(os.path.join('../', frameDir, self._insar.amplitude), self._insar.amplitude)
            os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
            os.rename(os.path.join('../', frameDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')

            #update track parameters, no need to update track parameters here

        else:
            #choose offsets
            if self.frameOffsetMatching:
                rangeOffsets = self._insar.frameRangeOffsetMatchingReference
                azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference
            else:
                rangeOffsets = self._insar.frameRangeOffsetGeometricalReference
                azimuthOffsets = self._insar.frameAzimuthOffsetGeometricalReference

            #list of input files
            inputInterferograms = []
            inputAmplitudes = []
            for i, frameNumber in enumerate(self._insar.referenceFrames):
                frameDir = 'f{}_{}'.format(i+1, frameNumber)
                inputInterferograms.append(os.path.join('../', frameDir, 'mosaic', self._insar.interferogram))
                inputAmplitudes.append(os.path.join('../', frameDir, 'mosaic', self._insar.amplitude))

            #note that track parameters are updated after mosaicking
            #mosaic amplitudes
            frameMosaic(referenceTrack, inputAmplitudes, self._insar.amplitude,
                rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
                updateTrack=False, phaseCompensation=False, resamplingMethod=0)
            #mosaic interferograms
            (phaseDiffEst, phaseDiffUsed, phaseDiffSource, numberOfValidSamples) = frameMosaic(referenceTrack, inputInterferograms, self._insar.interferogram,
                rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
                updateTrack=False, phaseCompensation=True, resamplingMethod=1)

            create_xml(self._insar.amplitude, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'amp')
            create_xml(self._insar.interferogram, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'int')

            #the first item of each list is meaningless, so only record items from index 1 on
            catalog.addItem('{} band frame phase diff estimated'.format(ionDir['subband'][k]), phaseDiffEst[1:], 'runIonSubband')
            catalog.addItem('{} band frame phase diff used'.format(ionDir['subband'][k]), phaseDiffUsed[1:], 'runIonSubband')
            catalog.addItem('{} band frame phase diff used source'.format(ionDir['subband'][k]), phaseDiffSource[1:], 'runIonSubband')
            catalog.addItem('{} band frame phase diff samples used'.format(ionDir['subband'][k]), numberOfValidSamples[1:], 'runIonSubband')

            #update secondary parameters here, no need to update secondary parameters here

        os.chdir('../')
        #save parameter file, no need to save parameter file here
        os.chdir('../')


    ############################################################
    # STEP 5. clear frame processing files
    ############################################################
    import shutil
    from isceobj.Alos2Proc.Alos2ProcPublic import runCmd

    # cleanup is currently disabled on purpose (subswath interferograms are kept)
    for k in range(2):
        os.chdir(ionDir['subband'][k])
        for i, frameNumber in enumerate(self._insar.referenceFrames):
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            #keep subswath interferograms
            #shutil.rmtree(frameDir)
            #cmd = 'rm -rf {}'.format(frameDir)
            #runCmd(cmd)
        os.chdir('../')


    ############################################################
    # STEP 6. create differential interferograms
    ############################################################
    import numpy as np
    from isceobj.Alos2Proc.Alos2ProcPublic import runCmd

    for k in range(2):
        os.chdir(ionDir['subband'][k])

        insarDir = ionDir['insar']
        os.makedirs(insarDir, exist_ok=True)
        os.chdir(insarDir)

        rangePixelSize = self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize
        radarWavelength = subbandRadarWavelength[k]
        rectRangeOffset = os.path.join('../../../', insarDir, self._insar.rectRangeOffset)

        # remove the flat-earth phase implied by the rectified range offsets;
        # (b!=0) zeroes pixels with no valid offset
        cmd = "imageMath.py -e='a*exp(-1.0*J*b*4.0*{}*{}/{}) * (b!=0)' --a={} --b={} -o {} -t cfloat".format(np.pi, rangePixelSize, radarWavelength, self._insar.interferogram, rectRangeOffset, self._insar.differentialInterferogram)
        runCmd(cmd)

        os.chdir('../../')


    os.chdir('../')
    catalog.printToLog(logger, "runIonSubband")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def defineIonDir():
    """Return the fixed directory-name layout used by ionospheric correction.

    Keys:
        swathMosaic: swath mosaicking directory
        insar:       final InSAR processing directory
        ion:         ionospheric correction directory
        subband:     lower/upper subband directories (list)
        ionCal:      final ionospheric phase calculation directory
    """
    return {
        'swathMosaic': 'mosaic',
        'insar': 'insar',
        'ion': 'ion',
        'subband': ['lower', 'upper'],
        'ionCal': 'ion_cal',
    }
|
||||
|
||||
|
||||
def defineIonFilenames():
    """Placeholder for ionospheric-correction filename definitions.

    Not implemented yet; intentionally returns None.
    """
    return None
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,257 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import shutil
|
||||
import logging
|
||||
import datetime
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runIonUwrap')
|
||||
|
||||
def runIonUwrap(self):
    """Unwrap the lower/upper subband interferograms for ionosphere estimation."""
    # Skip entirely when InSAR processing is disabled (attribute may be absent).
    if not getattr(self, 'doInSAR', True):
        return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    # Without ionospheric correction there is nothing to unwrap; just log and exit.
    if not self.doIon:
        catalog.printToLog(logger, "runIonUwrap")
        self._insar.procDoc.addAllFromCatalog(catalog)
        return

    referenceTrack = self._insar.loadTrack(reference=True)

    # ionUwrap leaves the CWD inside ion/ion_cal; step back out afterwards.
    ionUwrap(self, referenceTrack)
    os.chdir('../../')

    catalog.printToLog(logger, "runIonUwrap")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def ionUwrap(self, referenceTrack, latLonDir=None):
    '''
    Multilook, filter and unwrap the lower/upper subband interferograms
    for ionospheric phase estimation.

    self:           application object carrying processing parameters and _insar state
    referenceTrack: reference track object (time tags used for unwrapping)
    latLonDir:      optional directory (relative to two levels above ion/ion_cal)
                    holding latitude/longitude files; if None they are taken
                    from the full-band insar directory

    Side effect: changes the current working directory into ion/ion_cal and
    leaves it there -- the caller is expected to chdir back.
    '''

    wbdFile = os.path.abspath(self._insar.wbd)

    from isceobj.Alos2Proc.runIonSubband import defineIonDir
    ionDir = defineIonDir()
    subbandPrefix = ['lower', 'upper']

    ionCalDir = os.path.join(ionDir['ion'], ionDir['ionCal'])
    os.makedirs(ionCalDir, exist_ok=True)
    os.chdir(ionCalDir)


    ############################################################
    # STEP 1. take looks
    ############################################################
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
    from contrib.alos2proc.alos2proc import look
    from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar

    # filename suffix encoding total looks (looks1 * looksIon)
    ml2 = '_{}rlks_{}alks'.format(self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon,
                          self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon)

    for k in range(2):
        fullbandDir = os.path.join('../../', ionDir['insar'])
        subbandDir = os.path.join('../', ionDir['subband'][k], ionDir['insar'])
        prefix = subbandPrefix[k]

        amp = isceobj.createImage()
        amp.load(os.path.join(subbandDir, self._insar.amplitude)+'.xml')
        width = amp.width
        length = amp.length
        # output dimensions after taking the additional ionospheric looks
        width2 = int(width / self._insar.numberRangeLooksIon)
        length2 = int(length / self._insar.numberAzimuthLooksIon)

        #take looks
        look(os.path.join(subbandDir, self._insar.differentialInterferogram), prefix+ml2+'.int', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 4, 0, 1)
        create_xml(prefix+ml2+'.int', width2, length2, 'int')
        look(os.path.join(subbandDir, self._insar.amplitude), prefix+ml2+'.amp', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 4, 1, 1)
        create_xml(prefix+ml2+'.amp', width2, length2, 'amp')

        # #water body
        # if k == 0:
        #     wbdOutFile = os.path.join(fullbandDir, self._insar.wbdOut)
        #     if os.path.isfile(wbdOutFile):
        #         look(wbdOutFile, 'wbd'+ml2+'.wbd', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 0, 0, 1)
        #         create_xml('wbd'+ml2+'.wbd', width2, length2, 'byte')

        #water body
        # done only once (k == 0); the lat/lon grids are band-independent
        if k == 0:
            if latLonDir is None:
                latFile = os.path.join(fullbandDir, self._insar.latitude)
                lonFile = os.path.join(fullbandDir, self._insar.longitude)
            else:
                latFile = os.path.join('../../', latLonDir, self._insar.latitude)
                lonFile = os.path.join('../../', latLonDir, self._insar.longitude)
            look(latFile, 'lat'+ml2+'.lat', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1)
            look(lonFile, 'lon'+ml2+'.lon', width, self._insar.numberRangeLooksIon, self._insar.numberAzimuthLooksIon, 3, 0, 1)
            create_xml('lat'+ml2+'.lat', width2, length2, 'double')
            create_xml('lon'+ml2+'.lon', width2, length2, 'double')
            # derive the water-body mask in radar coordinates from the looked lat/lon
            waterBodyRadar('lat'+ml2+'.lat', 'lon'+ml2+'.lon', wbdFile, 'wbd'+ml2+'.wbd')


    ############################################################
    # STEP 2. compute coherence
    ############################################################
    from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence

    lowerbandInterferogramFile = subbandPrefix[0]+ml2+'.int'
    upperbandInterferogramFile = subbandPrefix[1]+ml2+'.int'
    lowerbandAmplitudeFile = subbandPrefix[0]+ml2+'.amp'
    upperbandAmplitudeFile = subbandPrefix[1]+ml2+'.amp'
    lowerbandCoherenceFile = subbandPrefix[0]+ml2+'.cor'
    upperbandCoherenceFile = subbandPrefix[1]+ml2+'.cor'
    coherenceFile = 'diff'+ml2+'.cor'

    # NOTE(review): width2/length2 come from the last loop iteration above;
    # this assumes both subbands have identical dimensions -- confirm upstream.
    lowerint = np.fromfile(lowerbandInterferogramFile, dtype=np.complex64).reshape(length2, width2)
    upperint = np.fromfile(upperbandInterferogramFile, dtype=np.complex64).reshape(length2, width2)
    loweramp = np.fromfile(lowerbandAmplitudeFile, dtype=np.float32).reshape(length2, width2*2)
    upperamp = np.fromfile(upperbandAmplitudeFile, dtype=np.float32).reshape(length2, width2*2)

    #compute coherence only using interferogram
    #here I use differential interferogram of lower and upper band interferograms
    #so that coherence is not affected by fringes
    cord = cal_coherence(lowerint*np.conjugate(upperint), win=3, edge=4)
    # interleave magnitude (even rows) and coherence (odd rows): 2-band BIL layout
    cor = np.zeros((length2*2, width2), dtype=np.float32)
    cor[0:length2*2:2, :] = np.sqrt( (np.absolute(lowerint)+np.absolute(upperint))/2.0 )
    cor[1:length2*2:2, :] = cord
    cor.astype(np.float32).tofile(coherenceFile)
    create_xml(coherenceFile, width2, length2, 'cor')

    #create lower and upper band coherence files
    #lower
    amp1 = loweramp[:, 0:width2*2:2]
    amp2 = loweramp[:, 1:width2*2:2]
    # normalized interferogram magnitude; the (amp==0) terms guard against
    # division by zero and the (amp!=0) factors zero out invalid pixels
    cor[1:length2*2:2, :] = np.absolute(lowerint)/(amp1+(amp1==0))/(amp2+(amp2==0))*(amp1!=0)*(amp2!=0)
    cor.astype(np.float32).tofile(lowerbandCoherenceFile)
    create_xml(lowerbandCoherenceFile, width2, length2, 'cor')

    #upper
    amp1 = upperamp[:, 0:width2*2:2]
    amp2 = upperamp[:, 1:width2*2:2]
    cor[1:length2*2:2, :] = np.absolute(upperint)/(amp1+(amp1==0))/(amp2+(amp2==0))*(amp1!=0)*(amp2!=0)
    cor.astype(np.float32).tofile(upperbandCoherenceFile)
    create_xml(upperbandCoherenceFile, width2, length2, 'cor')


    ############################################################
    # STEP 3. filtering subband interferograms
    ############################################################
    from contrib.alos2filter.alos2filter import psfilt1
    from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
    from mroipac.icu.Icu import Icu

    for k in range(2):
        #1. filtering subband interferogram
        if self.filterSubbandInt:
            toBeFiltered = 'tmp.int'
            if self.removeMagnitudeBeforeFilteringSubbandInt:
                cmd = "imageMath.py -e='a/(abs(a)+(a==0))' --a={} -o {} -t cfloat -s BSQ".format(subbandPrefix[k]+ml2+'.int', toBeFiltered)
            else:
                #scale the inteferogram, otherwise its magnitude is too large for filtering
                cmd = "imageMath.py -e='a/100000.0' --a={} -o {} -t cfloat -s BSQ".format(subbandPrefix[k]+ml2+'.int', toBeFiltered)
            runCmd(cmd)

            intImage = isceobj.createIntImage()
            intImage.load(toBeFiltered + '.xml')
            width = intImage.width
            length = intImage.length

            windowSize = self.filterWinsizeSubbandInt
            stepSize = self.filterStepsizeSubbandInt
            psfilt1(toBeFiltered, 'filt_'+subbandPrefix[k]+ml2+'.int', width, self.filterStrengthSubbandInt, windowSize, stepSize)
            create_xml('filt_'+subbandPrefix[k]+ml2+'.int', width, length, 'int')

            # drop the temporary scaled interferogram and its metadata
            os.remove(toBeFiltered)
            os.remove(toBeFiltered + '.vrt')
            os.remove(toBeFiltered + '.xml')

            toBeUsedInPhsig = 'filt_'+subbandPrefix[k]+ml2+'.int'
        else:
            toBeUsedInPhsig = subbandPrefix[k]+ml2+'.int'

        #2. create phase sigma for phase unwrapping
        #recreate filtered image
        filtImage = isceobj.createIntImage()
        filtImage.load(toBeUsedInPhsig + '.xml')
        filtImage.setAccessMode('read')
        filtImage.createImage()

        #amplitude image
        ampImage = isceobj.createAmpImage()
        ampImage.load(subbandPrefix[k]+ml2+'.amp' + '.xml')
        ampImage.setAccessMode('read')
        ampImage.createImage()

        #phase sigma correlation image
        phsigImage = isceobj.createImage()
        phsigImage.setFilename(subbandPrefix[k]+ml2+'.phsig')
        phsigImage.setWidth(filtImage.width)
        phsigImage.dataType='FLOAT'
        phsigImage.bands = 1
        phsigImage.setImageType('cor')
        phsigImage.setAccessMode('write')
        phsigImage.createImage()

        # run ICU only to produce the phase-sigma image; unwrapping is disabled
        icu = Icu(name='insarapp_filter_icu')
        icu.configure()
        icu.unwrappingFlag = False
        icu.icu(intImage = filtImage, ampImage=ampImage, phsigImage=phsigImage)

        phsigImage.renderHdr()

        filtImage.finalizeImage()
        ampImage.finalizeImage()
        phsigImage.finalizeImage()


    ############################################################
    # STEP 4. phase unwrapping
    ############################################################
    from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrap
    from isceobj.Alos2Proc.Alos2ProcPublic import snaphuUnwrapOriginal

    for k in range(2):
        # center time of the multilooked scene, needed by snaphuUnwrap
        tmid = referenceTrack.sensingStart + datetime.timedelta(seconds=(self._insar.numberAzimuthLooks1-1.0)/2.0*referenceTrack.azimuthLineInterval+
               referenceTrack.numberOfLines/2.0*self._insar.numberAzimuthLooks1*referenceTrack.azimuthLineInterval)

        if self.filterSubbandInt:
            toBeUnwrapped = 'filt_'+subbandPrefix[k]+ml2+'.int'
            coherenceFile = subbandPrefix[k]+ml2+'.phsig'
        else:
            toBeUnwrapped = subbandPrefix[k]+ml2+'.int'
            coherenceFile = 'diff'+ml2+'.cor'

        #if shutil.which('snaphu') != None:
        #do not use original snaphu now
        # branch deliberately disabled: always use the snaphu bundled with ISCE
        if False:
            print('\noriginal snaphu program found')
            print('unwrap {} using original snaphu, rather than that in ISCE'.format(toBeUnwrapped))
            snaphuUnwrapOriginal(toBeUnwrapped,
                subbandPrefix[k]+ml2+'.phsig',
                subbandPrefix[k]+ml2+'.amp',
                subbandPrefix[k]+ml2+'.unw',
                costMode = 's',
                initMethod = 'mcf',
                snaphuConfFile = '{}_snaphu.conf'.format(subbandPrefix[k]))
        else:
            snaphuUnwrap(referenceTrack, tmid,
                toBeUnwrapped,
                coherenceFile,
                subbandPrefix[k]+ml2+'.unw',
                self._insar.numberRangeLooks1*self._insar.numberRangeLooksIon,
                self._insar.numberAzimuthLooks1*self._insar.numberAzimuthLooksIon,
                costMode = 'SMOOTH',initMethod = 'MCF', defomax = 2, initOnly = True)
|
||||
|
||||
|
|
@ -0,0 +1,86 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
from contrib.alos2proc.alos2proc import look
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runLook')
|
||||
|
||||
def runLook(self):
    '''take looks

    Apply the second round of multilooking (numberRangeLooks2 x
    numberAzimuthLooks2) to the differential interferogram, amplitude,
    lat/lon/height and LOS products in the insar directory, and derive the
    multilooked water-body mask. No-op when both look factors are 1.
    '''
    # skip entirely when InSAR processing is disabled
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    #referenceTrack = self._insar.loadTrack(reference=True)
    #secondaryTrack = self._insar.loadTrack(reference=False)
    wbdFile = os.path.abspath(self._insar.wbd)

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)


    amp = isceobj.createImage()
    amp.load(self._insar.amplitude+'.xml')
    width = amp.width
    length = amp.length
    # output dimensions after the second round of looks
    width2 = int(width / self._insar.numberRangeLooks2)
    length2 = int(length / self._insar.numberAzimuthLooks2)

    # only multilook when at least one look factor is > 1
    if not ((self._insar.numberRangeLooks2 == 1) and (self._insar.numberAzimuthLooks2 == 1)):
        #take looks
        look(self._insar.differentialInterferogram, self._insar.multilookDifferentialInterferogram, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 4, 0, 1)
        look(self._insar.amplitude, self._insar.multilookAmplitude, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 4, 1, 1)
        look(self._insar.latitude, self._insar.multilookLatitude, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 3, 0, 1)
        look(self._insar.longitude, self._insar.multilookLongitude, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 3, 0, 1)
        look(self._insar.height, self._insar.multilookHeight, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 3, 0, 1)
        #creat xml
        create_xml(self._insar.multilookDifferentialInterferogram, width2, length2, 'int')
        create_xml(self._insar.multilookAmplitude, width2, length2, 'amp')
        create_xml(self._insar.multilookLatitude, width2, length2, 'double')
        create_xml(self._insar.multilookLongitude, width2, length2, 'double')
        create_xml(self._insar.multilookHeight, width2, length2, 'double')
        #los has two bands, use look program in isce instead
        #cmd = "looks.py -i {} -o {} -r {} -a {}".format(self._insar.los, self._insar.multilookLos, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2)
        #runCmd(cmd)

        #replace the above system call with function call
        from mroipac.looks.Looks import Looks
        from isceobj.Image import createImage
        inImage = createImage()
        inImage.load(self._insar.los+'.xml')

        lkObj = Looks()
        lkObj.setDownLooks(self._insar.numberAzimuthLooks2)
        lkObj.setAcrossLooks(self._insar.numberRangeLooks2)
        lkObj.setInputImage(inImage)
        lkObj.setOutputFilename(self._insar.multilookLos)
        lkObj.looks()

        #water body
        #this looking operation has no problems where there is only water and land, but there is also possible no-data area
        #look(self._insar.wbdOut, self._insar.multilookWbdOut, width, self._insar.numberRangeLooks2, self._insar.numberAzimuthLooks2, 0, 0, 1)
        #create_xml(self._insar.multilookWbdOut, width2, length2, 'byte')
        #use waterBodyRadar instead to avoid the problems of no-data pixels in water body
        waterBodyRadar(self._insar.multilookLatitude, self._insar.multilookLongitude, wbdFile, self._insar.multilookWbdOut)


    os.chdir('../')

    catalog.printToLog(logger, "runLook")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,466 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import datetime
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Constants import SPEED_OF_LIGHT
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import overlapFrequency
|
||||
from contrib.alos2proc.alos2proc import rg_filter
|
||||
from contrib.alos2proc.alos2proc import resamp
|
||||
from contrib.alos2proc.alos2proc import mbf
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runPrepareSlc')
|
||||
|
||||
def runPrepareSlc(self):
    '''Extract images.

    Prepare reference/secondary SLCs for interferometry, per frame/swath:
      1. optionally crop both SLCs to their common coverage;
      2. range-filter both SLCs to their common range bandwidth;
      3. resample the secondary SLC to the reference sample spacing;
      4. azimuth (burst) filter both SLCs for ScanSAR combinations.
    Updated track/swath parameters are saved back at the end.
    '''
    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)


    ####################################################
    #1. crop slc
    ####################################################
    #for ScanSAR-stripmap interferometry, we always crop slcs
    #for other cases, up to users
    if ((self._insar.modeCombination == 31) or (self._insar.modeCombination == 32)) or (self.cropSlc):
        for i, frameNumber in enumerate(self._insar.referenceFrames):
            frameDir = 'f{}_{}'.format(i+1, frameNumber)
            os.chdir(frameDir)
            for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
                swathDir = 's{}'.format(swathNumber)
                os.chdir(swathDir)

                print('cropping frame {}, swath {}'.format(frameNumber, swathNumber))

                referenceSwath = referenceTrack.frames[i].swaths[j]
                secondarySwath = secondaryTrack.frames[i].swaths[j]

                #crop reference
                cropSlc(referenceTrack.orbit, referenceSwath, self._insar.referenceSlc, secondaryTrack.orbit, secondarySwath, edge=0, useVirtualFile=self.useVirtualFile)
                #crop secondary, since secondary may go through resampling, we set edge=9
                #cropSlc(secondaryTrack.orbit, secondarySwath, self._insar.secondarySlc, referenceTrack.orbit, referenceSwath, edge=9, useVirtualFile=self.useVirtualFile)
                cropSlc(secondaryTrack.orbit, secondarySwath, self._insar.secondarySlc, referenceTrack.orbit, referenceSwath, edge=0, useVirtualFile=self.useVirtualFile)

                os.chdir('../')
            os.chdir('../')


    ####################################################
    #2. range-filter slc
    ####################################################
    #compute filtering parameters, radarwavelength and range bandwidth should be the same across all swaths and frames
    centerfreq1 = SPEED_OF_LIGHT / referenceTrack.radarWavelength
    bandwidth1 = referenceTrack.frames[0].swaths[0].rangeBandwidth
    centerfreq2 = SPEED_OF_LIGHT / secondaryTrack.radarWavelength
    bandwidth2 = secondaryTrack.frames[0].swaths[0].rangeBandwidth
    overlapfreq = overlapFrequency(centerfreq1, bandwidth1, centerfreq2, bandwidth2)

    if overlapfreq == None:
        raise Exception('there is no overlap bandwidth in range')
    overlapbandwidth = overlapfreq[1] - overlapfreq[0]
    # require at least 3 MHz of common range bandwidth
    if overlapbandwidth < 3e6:
        print('overlap bandwidth: {}, percentage: {}%'.format(overlapbandwidth, 100.0*overlapbandwidth/bandwidth1))
        raise Exception('there is not enough overlap bandwidth in range')
    centerfreq = (overlapfreq[1] + overlapfreq[0]) / 2.0

    for i, frameNumber in enumerate(self._insar.referenceFrames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        os.chdir(frameDir)
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            swathDir = 's{}'.format(swathNumber)
            os.chdir(swathDir)

            print('range filtering frame {}, swath {}'.format(frameNumber, swathNumber))

            referenceSwath = referenceTrack.frames[i].swaths[j]
            secondarySwath = secondaryTrack.frames[i].swaths[j]

            # #compute filtering parameters
            # centerfreq1 = SPEED_OF_LIGHT / referenceTrack.radarWavelength
            # bandwidth1 = referenceSwath.rangeBandwidth
            # centerfreq2 = SPEED_OF_LIGHT / secondaryTrack.radarWavelength
            # bandwidth2 = secondarySwath.rangeBandwidth
            # overlapfreq = overlapFrequency(centerfreq1, bandwidth1, centerfreq2, bandwidth2)

            # if overlapfreq == None:
            #     raise Exception('there is no overlap bandwidth in range')
            # overlapbandwidth = overlapfreq[1] - overlapfreq[0]
            # if overlapbandwidth < 3e6:
            #     print('overlap bandwidth: {}, percentage: {}%'.format(overlapbandwidth, 100.0*overlapbandwidth/bandwidth1))
            #     raise Exception('there is not enough overlap bandwidth in range')
            # centerfreq = (overlapfreq[1] + overlapfreq[0]) / 2.0

            #filter reference
            # skip when the reference already matches the common band
            if abs(centerfreq1 - centerfreq) < 1.0 and (bandwidth1 - 1.0) < overlapbandwidth:
                print('no need to range filter {}'.format(self._insar.referenceSlc))
            else:
                print('range filter {}'.format(self._insar.referenceSlc))
                tmpSlc = 'tmp.slc'
                rg_filter(self._insar.referenceSlc, 1, [tmpSlc], [overlapbandwidth / referenceSwath.rangeSamplingRate],
                    [(centerfreq - centerfreq1) / referenceSwath.rangeSamplingRate],
                    257, 2048, 0.1, 0, 0.0)

                # the .slc binary may not exist on disk when virtual files are
                # used, hence the isfile guard; .vrt/.xml are always present
                if os.path.isfile(self._insar.referenceSlc):
                    os.remove(self._insar.referenceSlc)
                os.remove(self._insar.referenceSlc+'.vrt')
                os.remove(self._insar.referenceSlc+'.xml')

                img = isceobj.createSlcImage()
                img.load(tmpSlc + '.xml')
                #remove original
                os.remove(tmpSlc + '.vrt')
                os.remove(tmpSlc + '.xml')
                os.rename(tmpSlc, self._insar.referenceSlc)
                #creat new
                img.setFilename(self._insar.referenceSlc)
                img.extraFilename = self._insar.referenceSlc + '.vrt'
                img.setAccessMode('READ')
                img.renderHdr()

                # record the filtered band as the new wavelength/bandwidth
                referenceTrack.radarWavelength = SPEED_OF_LIGHT/centerfreq
                referenceSwath.rangeBandwidth = overlapbandwidth

            #filter secondary
            if abs(centerfreq2 - centerfreq) < 1.0 and (bandwidth2 - 1.0) < overlapbandwidth:
                print('no need to range filter {}'.format(self._insar.secondarySlc))
            else:
                print('range filter {}'.format(self._insar.secondarySlc))
                tmpSlc = 'tmp.slc'
                rg_filter(self._insar.secondarySlc, 1, [tmpSlc], [overlapbandwidth / secondarySwath.rangeSamplingRate],
                    [(centerfreq - centerfreq2) / secondarySwath.rangeSamplingRate],
                    257, 2048, 0.1, 0, 0.0)

                if os.path.isfile(self._insar.secondarySlc):
                    os.remove(self._insar.secondarySlc)
                os.remove(self._insar.secondarySlc+'.vrt')
                os.remove(self._insar.secondarySlc+'.xml')

                img = isceobj.createSlcImage()
                img.load(tmpSlc + '.xml')
                #remove original
                os.remove(tmpSlc + '.vrt')
                os.remove(tmpSlc + '.xml')
                os.rename(tmpSlc, self._insar.secondarySlc)
                #creat new
                img.setFilename(self._insar.secondarySlc)
                img.extraFilename = self._insar.secondarySlc + '.vrt'
                img.setAccessMode('READ')
                img.renderHdr()

                secondaryTrack.radarWavelength = SPEED_OF_LIGHT/centerfreq
                secondarySwath.rangeBandwidth = overlapbandwidth

            os.chdir('../')
        os.chdir('../')


    ####################################################
    #3. equalize sample size
    ####################################################
    for i, frameNumber in enumerate(self._insar.referenceFrames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        os.chdir(frameDir)
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            swathDir = 's{}'.format(swathNumber)
            os.chdir(swathDir)

            print('equalize sample size frame {}, swath {}'.format(frameNumber, swathNumber))

            referenceSwath = referenceTrack.frames[i].swaths[j]
            secondarySwath = secondaryTrack.frames[i].swaths[j]

            # resample only when sampling rate or PRF differ (tolerance 1 Hz)
            if abs(referenceSwath.rangeSamplingRate - secondarySwath.rangeSamplingRate) < 1.0 and abs(referenceSwath.prf - secondarySwath.prf) < 1.0:
                print('no need to resample {}.'.format(self._insar.secondarySlc))
            else:
                outWidth = round(secondarySwath.numberOfSamples / secondarySwath.rangeSamplingRate * referenceSwath.rangeSamplingRate)
                outLength = round(secondarySwath.numberOfLines / secondarySwath.prf * referenceSwath.prf)

                tmpSlc = 'tmp.slc'
                # pure rescaling resample: linear range / azimuth coefficients only
                resamp(self._insar.secondarySlc, tmpSlc, 'fake', 'fake', outWidth, outLength, secondarySwath.prf, secondarySwath.dopplerVsPixel,
                    rgcoef=[0.0, (1.0/referenceSwath.rangeSamplingRate) / (1.0/secondarySwath.rangeSamplingRate) - 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    azcoef=[0.0, 0.0, (1.0/referenceSwath.prf) / (1.0/secondarySwath.prf) - 1.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                    azpos_off=0.0)

                if os.path.isfile(self._insar.secondarySlc):
                    os.remove(self._insar.secondarySlc)
                os.remove(self._insar.secondarySlc+'.vrt')
                os.remove(self._insar.secondarySlc+'.xml')

                img = isceobj.createSlcImage()
                img.load(tmpSlc + '.xml')
                #remove original
                os.remove(tmpSlc + '.vrt')
                os.remove(tmpSlc + '.xml')
                os.rename(tmpSlc, self._insar.secondarySlc)
                #creat new
                img.setFilename(self._insar.secondarySlc)
                img.extraFilename = self._insar.secondarySlc + '.vrt'
                img.setAccessMode('READ')
                img.renderHdr()

                #update parameters
                #update doppler and azfmrate first
                # refit polynomials against the new (resampled) pixel index
                index2 = np.arange(outWidth)
                index = np.arange(outWidth) * (1.0/referenceSwath.rangeSamplingRate) / (1.0/secondarySwath.rangeSamplingRate)
                dop = np.polyval(secondarySwath.dopplerVsPixel[::-1], index)
                p = np.polyfit(index2, dop, 3)
                secondarySwath.dopplerVsPixel = [p[3], p[2], p[1], p[0]]

                azfmrate = np.polyval(secondarySwath.azimuthFmrateVsPixel[::-1], index)
                p = np.polyfit(index2, azfmrate, 3)
                secondarySwath.azimuthFmrateVsPixel = [p[3], p[2], p[1], p[0]]

                secondarySwath.numberOfSamples = outWidth
                secondarySwath.numberOfLines = outLength

                # after resampling the secondary shares the reference geometry
                secondarySwath.prf = referenceSwath.prf
                secondarySwath.rangeSamplingRate = referenceSwath.rangeSamplingRate
                secondarySwath.rangePixelSize = referenceSwath.rangePixelSize
                secondarySwath.azimuthPixelSize = referenceSwath.azimuthPixelSize
                secondarySwath.azimuthLineInterval = referenceSwath.azimuthLineInterval
                secondarySwath.prfFraction = referenceSwath.prfFraction

            os.chdir('../')
        os.chdir('../')


    ####################################################
    #4. mbf
    ####################################################
    for i, frameNumber in enumerate(self._insar.referenceFrames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        os.chdir(frameDir)
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            swathDir = 's{}'.format(swathNumber)
            os.chdir(swathDir)

            print('azimuth filter frame {}, swath {}'.format(frameNumber, swathNumber))

            referenceSwath = referenceTrack.frames[i].swaths[j]
            secondarySwath = secondaryTrack.frames[i].swaths[j]

            #using Piyush's code for computing range and azimuth offsets
            midRange = referenceSwath.startingRange + referenceSwath.rangePixelSize * referenceSwath.numberOfSamples * 0.5
            midSensingStart = referenceSwath.sensingStart + datetime.timedelta(seconds = referenceSwath.numberOfLines * 0.5 / referenceSwath.prf)
            llh = referenceTrack.orbit.rdr2geo(midSensingStart, midRange)
            slvaz, slvrng = secondaryTrack.orbit.geo2rdr(llh)
            ###Translate to offsets
            #at this point, secondary range pixel size and prf should be the same as those of reference
            rgoff = ((slvrng - secondarySwath.startingRange) / referenceSwath.rangePixelSize) - referenceSwath.numberOfSamples * 0.5
            # azoff is computed for symmetry but only rgoff is used below
            azoff = ((slvaz - secondarySwath.sensingStart).total_seconds() * referenceSwath.prf) - referenceSwath.numberOfLines * 0.5

            #filter reference
            # azimuth filter only for ScanSAR-ScanSAR (21) with sufficient burst sync
            if not ((self._insar.modeCombination == 21) and (self._insar.burstSynchronization <= self.burstSynchronizationThreshold)):
                print('no need to azimuth filter {}.'.format(self._insar.referenceSlc))
            else:
                # express the secondary Doppler polynomial in reference pixel index
                index = np.arange(referenceSwath.numberOfSamples) + rgoff
                dop = np.polyval(secondarySwath.dopplerVsPixel[::-1], index)
                p = np.polyfit(index-rgoff, dop, 3)
                dopplerVsPixelSecondary = [p[3], p[2], p[1], p[0]]

                tmpSlc = 'tmp.slc'
                mbf(self._insar.referenceSlc, tmpSlc, referenceSwath.prf, 1.0,
                    referenceSwath.burstLength, referenceSwath.burstCycleLength-referenceSwath.burstLength,
                    self._insar.burstUnsynchronizedTime * referenceSwath.prf,
                    (referenceSwath.burstStartTime - referenceSwath.sensingStart).total_seconds() * referenceSwath.prf,
                    referenceSwath.azimuthFmrateVsPixel, referenceSwath.dopplerVsPixel, dopplerVsPixelSecondary)

                if os.path.isfile(self._insar.referenceSlc):
                    os.remove(self._insar.referenceSlc)
                os.remove(self._insar.referenceSlc+'.vrt')
                os.remove(self._insar.referenceSlc+'.xml')

                img = isceobj.createSlcImage()
                img.load(tmpSlc + '.xml')
                #remove original
                os.remove(tmpSlc + '.vrt')
                os.remove(tmpSlc + '.xml')
                os.rename(tmpSlc, self._insar.referenceSlc)
                #creat new
                img.setFilename(self._insar.referenceSlc)
                img.extraFilename = self._insar.referenceSlc + '.vrt'
                img.setAccessMode('READ')
                img.renderHdr()

            #filter secondary
            if not(
                ((self._insar.modeCombination == 21) and (self._insar.burstSynchronization <= self.burstSynchronizationThreshold)) or \
                (self._insar.modeCombination == 31)
                ):
                print('no need to azimuth filter {}.'.format(self._insar.secondarySlc))
            else:
                # express the reference Doppler polynomial in secondary pixel index
                index = np.arange(secondarySwath.numberOfSamples) - rgoff
                dop = np.polyval(referenceSwath.dopplerVsPixel[::-1], index)
                p = np.polyfit(index+rgoff, dop, 3)
                dopplerVsPixelReference = [p[3], p[2], p[1], p[0]]

                tmpSlc = 'tmp.slc'
                mbf(self._insar.secondarySlc, tmpSlc, secondarySwath.prf, 1.0,
                    secondarySwath.burstLength, secondarySwath.burstCycleLength-secondarySwath.burstLength,
                    -self._insar.burstUnsynchronizedTime * secondarySwath.prf,
                    (secondarySwath.burstStartTime - secondarySwath.sensingStart).total_seconds() * secondarySwath.prf,
                    secondarySwath.azimuthFmrateVsPixel, secondarySwath.dopplerVsPixel, dopplerVsPixelReference)

                if os.path.isfile(self._insar.secondarySlc):
                    os.remove(self._insar.secondarySlc)
                os.remove(self._insar.secondarySlc+'.vrt')
                os.remove(self._insar.secondarySlc+'.xml')

                img = isceobj.createSlcImage()
                img.load(tmpSlc + '.xml')
                #remove original
                os.remove(tmpSlc + '.vrt')
                os.remove(tmpSlc + '.xml')
                os.rename(tmpSlc, self._insar.secondarySlc)
                #creat new
                img.setFilename(self._insar.secondarySlc)
                img.extraFilename = self._insar.secondarySlc + '.vrt'
                img.setAccessMode('READ')
                img.renderHdr()

            os.chdir('../')
        os.chdir('../')

    #in case parameters changed
    self._insar.saveTrack(referenceTrack, reference=True)
    self._insar.saveTrack(secondaryTrack, reference=False)

    catalog.printToLog(logger, "runPrepareSlc")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def cropSlc(orbit, swath, slc, orbit2, swath2, edge=0, useVirtualFile=True):
    '''Crop an SLC image to the area it shares with another image.

    orbit:  orbit of the image to be cropped
    swath:  swath of the image to be cropped (updated in place)
    slc:    image (file name) to be cropped
    orbit2: orbit of the other image
    swath2: swath of the other image
    edge:   extra border (lines/samples) kept around the computed overlap
    useVirtualFile: if True, crop by writing a new VRT that points into the
                    original binary file (zero-copy); otherwise rewrite the
                    binary data in place.

    Raises an Exception when the overlap is smaller than 1000 lines/samples.
    Returns None; swath metadata (size, starting range/time, doppler and
    FM-rate polynomials) is updated to describe the cropped image.
    '''
    from isceobj.Alos2Proc.Alos2ProcPublic import find_vrt_keyword
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml

    #find topleft and lower-right corners
    #all indices start with 0
    corner = []
    for x in [[0, 0], [swath2.numberOfLines -1, swath2.numberOfSamples-1]]:
        line2 = x[0]
        sample2 = x[1]
        rg2 = swath2.startingRange + swath2.rangePixelSize * sample2
        az2 = swath2.sensingStart + datetime.timedelta(seconds = line2 / swath2.prf)
        #project the other image's corner to the ground, then back into the
        #radar geometry of the image to be cropped
        llh2 = orbit2.rdr2geo(az2, rg2)
        az, rg = orbit.geo2rdr(llh2)
        line = (az - swath.sensingStart).total_seconds() * swath.prf
        sample = (rg - swath.startingRange) / swath.rangePixelSize
        corner.append([line, sample])

    #image (to be cropped) bounds
    firstLine = 0
    lastLine = swath.numberOfLines-1
    firstSample = 0
    lastSample = swath.numberOfSamples-1

    #the other image bounds in image (to be cropped)
    #add edge
    #edge = 9
    firstLine2 = int(corner[0][0] - edge)
    lastLine2 = int(corner[1][0] + edge)
    firstSample2 = int(corner[0][1] - edge)
    lastSample2 = int(corner[1][1] + edge)

    #image (to be cropped) output bounds: intersection of the two extents
    firstLine3 = max(firstLine, firstLine2)
    lastLine3 = min(lastLine, lastLine2)
    firstSample3 = max(firstSample, firstSample2)
    lastSample3 = min(lastSample, lastSample2)
    numberOfSamples3 = lastSample3-firstSample3+1
    numberOfLines3 = lastLine3-firstLine3+1

    #check if there is overlap
    if lastLine3 - firstLine3 +1 < 1000:
        raise Exception('azimuth overlap < 1000 lines, not enough area for InSAR\n')
    if lastSample3 - firstSample3 +1 < 1000:
        raise Exception('range overlap < 1000 samples, not enough area for InSAR\n')

    #check if there is a need to crop image: skip if the crop would trim
    #fewer than 100 lines/samples on every side
    if abs(firstLine3-firstLine) < 100 and abs(lastLine3-lastLine) < 100 and \
       abs(firstSample3-firstSample) < 100 and abs(lastSample3-lastSample) < 100:
        print('no need to crop {}. nothing is done by crop.'.format(slc))
        return

    #crop image
    if useVirtualFile:
        #vrt: read the layout of the existing virtual file
        SourceFilename = find_vrt_keyword(slc+'.vrt', 'SourceFilename')
        ImageOffset = int(find_vrt_keyword(slc+'.vrt', 'ImageOffset'))
        PixelOffset = int(find_vrt_keyword(slc+'.vrt', 'PixelOffset'))
        LineOffset = int(find_vrt_keyword(slc+'.vrt', 'LineOffset'))

        #overwrite xml with the cropped dimensions
        img = isceobj.createImage()
        img.load(slc+'.xml')
        img.width = numberOfSamples3
        img.length = numberOfLines3
        img.renderHdr()

        #overwrite vrt: same source file, new offsets into the old raster
        #(8 bytes per complex64 sample)
        with open(slc+'.vrt', 'w') as fid:
            fid.write('''<VRTDataset rasterXSize="{0}" rasterYSize="{1}">
    <VRTRasterBand band="1" dataType="CFloat32" subClass="VRTRawRasterBand">
        <SourceFilename relativeToVRT="0">{2}</SourceFilename>
        <ByteOrder>MSB</ByteOrder>
        <ImageOffset>{3}</ImageOffset>
        <PixelOffset>8</PixelOffset>
        <LineOffset>{4}</LineOffset>
    </VRTRasterBand>
</VRTDataset>'''.format(numberOfSamples3,
                        numberOfLines3,
                        SourceFilename,
                        ImageOffset + firstLine3*LineOffset + firstSample3*8,
                        LineOffset))
    else:
        #read and crop data
        with open(slc, 'rb') as f:
            f.seek(firstLine3 * swath.numberOfSamples * np.dtype(np.complex64).itemsize, 0)
            data = np.fromfile(f, dtype=np.complex64, count=numberOfLines3 * swath.numberOfSamples)\
                .reshape(numberOfLines3,swath.numberOfSamples)
            data2 = data[:, firstSample3:lastSample3+1]
        #overwrite original
        data2.astype(np.complex64).tofile(slc)

        #create new vrt and xml
        os.remove(slc + '.xml')
        os.remove(slc + '.vrt')
        create_xml(slc, numberOfSamples3, numberOfLines3, 'slc')

    #update parameters
    #update doppler and azfmrate first: re-fit the polynomials so that their
    #pixel index is relative to the new (cropped) first sample
    dop = np.polyval(swath.dopplerVsPixel[::-1], np.arange(swath.numberOfSamples))
    dop3 = dop[firstSample3:lastSample3+1]
    p = np.polyfit(np.arange(numberOfSamples3), dop3, 3)
    swath.dopplerVsPixel = [p[3], p[2], p[1], p[0]]

    azfmrate = np.polyval(swath.azimuthFmrateVsPixel[::-1], np.arange(swath.numberOfSamples))
    azfmrate3 = azfmrate[firstSample3:lastSample3+1]
    p = np.polyfit(np.arange(numberOfSamples3), azfmrate3, 3)
    swath.azimuthFmrateVsPixel = [p[3], p[2], p[1], p[0]]

    swath.numberOfSamples = numberOfSamples3
    swath.numberOfLines = numberOfLines3

    #shift the swath origin to the cropped first sample/line
    swath.startingRange += firstSample3 * swath.rangePixelSize
    swath.sensingStart += datetime.timedelta(seconds = firstLine3 / swath.prf)

    #no need to update frame and track, as parameters requiring changes are determined
    #in swath and frame mosaicking, which is not yet done at this point.
|
@ -0,0 +1,345 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import logging
|
||||
import datetime
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
import isceobj.Sensor.MultiMode as MultiMode
|
||||
from isceobj.Planet.Planet import Planet
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxRdr
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import getBboxGeo
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import modeProcParDict
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runPreprocessor')
|
||||
|
||||
def runPreprocessor(self):
    '''Extract images.

    Locates the ALOS-2 leader (LED-*) and image (IMG-*) files for the
    reference and secondary acquisitions, determines the mode combination,
    default numbers of looks, frames and swaths to process, then reads every
    frame/swath into per-directory products and saves them.
    '''
    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)


    #find files
    #actually no need to use absolute path any longer, since we are able to find file from vrt now. 27-JAN-2020, CRL.
    #denseoffset may still need absolute path when making links
    self.referenceDir = os.path.abspath(self.referenceDir)
    self.secondaryDir = os.path.abspath(self.secondaryDir)

    ledFilesReference = sorted(glob.glob(os.path.join(self.referenceDir, 'LED-ALOS2*-*-*')))
    imgFilesReference = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*-*-*'.format(self.referencePolarization.upper()))))

    ledFilesSecondary = sorted(glob.glob(os.path.join(self.secondaryDir, 'LED-ALOS2*-*-*')))
    imgFilesSecondary = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*-*-*'.format(self.secondaryPolarization.upper()))))

    #frame number is the last 4 digits of the third-from-last dash field
    firstFrameReference = ledFilesReference[0].split('-')[-3][-4:]
    firstFrameSecondary = ledFilesSecondary[0].split('-')[-3][-4:]
    firstFrameImagesReference = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.referencePolarization.upper(), firstFrameReference))))
    firstFrameImagesSecondary = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.secondaryPolarization.upper(), firstFrameSecondary))))


    #determine operation mode (first three letters of the last dash field)
    referenceMode = os.path.basename(ledFilesReference[0]).split('-')[-1][0:3]
    secondaryMode = os.path.basename(ledFilesSecondary[0]).split('-')[-1][0:3]
    spotlightModes = ['SBS']
    stripmapModes = ['UBS', 'UBD', 'HBS', 'HBD', 'HBQ', 'FBS', 'FBD', 'FBQ']
    scansarNominalModes = ['WBS', 'WBD', 'WWS', 'WWD']
    scansarWideModes = ['VBS', 'VBD']
    scansarModes = ['WBS', 'WBD', 'WWS', 'WWD', 'VBS', 'VBD']

    #usable combinations
    if (referenceMode in spotlightModes) and (secondaryMode in spotlightModes):
        self._insar.modeCombination = 0
    elif (referenceMode in stripmapModes) and (secondaryMode in stripmapModes):
        self._insar.modeCombination = 1
    elif (referenceMode in scansarNominalModes) and (secondaryMode in scansarNominalModes):
        self._insar.modeCombination = 21
    elif (referenceMode in scansarWideModes) and (secondaryMode in scansarWideModes):
        self._insar.modeCombination = 22
    elif (referenceMode in scansarNominalModes) and (secondaryMode in stripmapModes):
        self._insar.modeCombination = 31
    elif (referenceMode in scansarWideModes) and (secondaryMode in stripmapModes):
        self._insar.modeCombination = 32
    else:
        print('\n\nthis mode combination is not possible')
        print('note that for ScanSAR-stripmap, ScanSAR must be reference\n\n')
        raise Exception('mode combination not supported')

    # pixel size from real data processing. azimuth pixel size may change a bit as
    # the antenna points to a different swath and therefore uses a different PRF.

    # MODE RANGE PIXEL SIZE (LOOKS)       AZIMUTH PIXEL SIZE (LOOKS)
    # -------------------------------------------------------------------
    # SPT  [SBS]
    #      1.4304222392897463 (2)         0.9351804642158579 (4)
    # SM1  [UBS,UBD]
    #      1.4304222392897463 (2)         1.8291988125114438 (2)
    # SM2  [HBS,HBD,HBQ]
    #      2.8608444785794984 (2)         3.0672373839847196 (2)
    # SM3  [FBS,FBD,FBQ]
    #      4.291266717869248 (2)          3.2462615913656667 (4)

    # WD1  [WBS,WBD] [WWS,WWD]
    #      8.582533435738496 (1)          2.6053935830031887 (14)
    #      8.582533435738496 (1)          2.092362043327227 (14)
    #      8.582533435738496 (1)          2.8817632034495717 (14)
    #      8.582533435738496 (1)          3.054362492601842 (14)
    #      8.582533435738496 (1)          2.4582084463356977 (14)

    # WD2  [VBS,VBD]
    #      8.582533435738496 (1)          2.9215796012950728 (14)
    #      8.582533435738496 (1)          3.088859074497863 (14)
    #      8.582533435738496 (1)          2.8792293071133073 (14)
    #      8.582533435738496 (1)          3.0592146044234854 (14)
    #      8.582533435738496 (1)          2.8818767752199137 (14)
    #      8.582533435738496 (1)          3.047038521027477 (14)
    #      8.582533435738496 (1)          2.898816222039108 (14)

    #determine default number of looks:
    self._insar.numberRangeLooks1 = self.numberRangeLooks1
    self._insar.numberAzimuthLooks1 = self.numberAzimuthLooks1
    self._insar.numberRangeLooks2 = self.numberRangeLooks2
    self._insar.numberAzimuthLooks2 = self.numberAzimuthLooks2
    #the following two will be automatically determined by runRdrDemOffset.py
    self._insar.numberRangeLooksSim = self.numberRangeLooksSim
    self._insar.numberAzimuthLooksSim = self.numberAzimuthLooksSim
    self._insar.numberRangeLooksIon = self.numberRangeLooksIon
    self._insar.numberAzimuthLooksIon = self.numberAzimuthLooksIon

    #fall back to per-mode defaults for anything the user did not set
    if self._insar.numberRangeLooks1 is None:
        self._insar.numberRangeLooks1 = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooks1']
    if self._insar.numberAzimuthLooks1 is None:
        self._insar.numberAzimuthLooks1 = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooks1']

    if self._insar.numberRangeLooks2 is None:
        self._insar.numberRangeLooks2 = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooks2']
    if self._insar.numberAzimuthLooks2 is None:
        self._insar.numberAzimuthLooks2 = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooks2']

    if self._insar.numberRangeLooksIon is None:
        self._insar.numberRangeLooksIon = modeProcParDict['ALOS-2'][referenceMode]['numberRangeLooksIon']
    if self._insar.numberAzimuthLooksIon is None:
        self._insar.numberAzimuthLooksIon = modeProcParDict['ALOS-2'][referenceMode]['numberAzimuthLooksIon']


    #define processing file names (dates come from the leader file name)
    self._insar.referenceDate = os.path.basename(ledFilesReference[0]).split('-')[2]
    self._insar.secondaryDate = os.path.basename(ledFilesSecondary[0]).split('-')[2]
    self._insar.setFilename(referenceDate=self._insar.referenceDate, secondaryDate=self._insar.secondaryDate, nrlks1=self._insar.numberRangeLooks1, nalks1=self._insar.numberAzimuthLooks1, nrlks2=self._insar.numberRangeLooks2, nalks2=self._insar.numberAzimuthLooks2)


    #find frame numbers
    if (self._insar.modeCombination == 31) or (self._insar.modeCombination == 32):
        if (self.referenceFrames == None) or (self.secondaryFrames == None):
            raise Exception('for ScanSAR-stripmap inteferometry, you must set reference and secondary frame numbers')
    #if not set, find frames automatically
    if self.referenceFrames == None:
        self.referenceFrames = []
        for led in ledFilesReference:
            frameNumber = os.path.basename(led).split('-')[1][-4:]
            if frameNumber not in self.referenceFrames:
                self.referenceFrames.append(frameNumber)
    if self.secondaryFrames == None:
        self.secondaryFrames = []
        for led in ledFilesSecondary:
            frameNumber = os.path.basename(led).split('-')[1][-4:]
            if frameNumber not in self.secondaryFrames:
                self.secondaryFrames.append(frameNumber)
    #sort frames
    self.referenceFrames = sorted(self.referenceFrames)
    self.secondaryFrames = sorted(self.secondaryFrames)
    #check number of frames
    if len(self.referenceFrames) != len(self.secondaryFrames):
        raise Exception('number of frames in reference dir is not equal to number of frames \
            in secondary dir. please set frame number manually')


    #find swath numbers (if not ScanSAR-ScanSAR, compute valid swaths)
    if (self._insar.modeCombination == 0) or (self._insar.modeCombination == 1):
        #spotlight/stripmap data have a single swath
        self.startingSwath = 1
        self.endingSwath = 1

    if self._insar.modeCombination == 21:
        if self.startingSwath == None:
            self.startingSwath = 1
        if self.endingSwath == None:
            self.endingSwath = 5

    if self._insar.modeCombination == 22:
        if self.startingSwath == None:
            self.startingSwath = 1
        if self.endingSwath == None:
            self.endingSwath = 7

    #determine starting and ending swaths for ScanSAR-stripmap, user's settings are overwritten
    #use first frame to check overlap
    if (self._insar.modeCombination == 31) or (self._insar.modeCombination == 32):
        if self._insar.modeCombination == 31:
            numberOfSwaths = 5
        else:
            numberOfSwaths = 7
        overlapSubswaths = []
        for i in range(numberOfSwaths):
            #compare each ScanSAR subswath against the (single) stripmap image
            overlapRatio = check_overlap(ledFilesReference[0], firstFrameImagesReference[i], ledFilesSecondary[0], firstFrameImagesSecondary[0])
            if overlapRatio > 1.0 / 4.0:
                overlapSubswaths.append(i+1)
        if overlapSubswaths == []:
            raise Exception('There is no overlap area between the ScanSAR-stripmap pair')
        self.startingSwath = int(overlapSubswaths[0])
        self.endingSwath = int(overlapSubswaths[-1])

    #save the valid frames and swaths for future processing
    self._insar.referenceFrames = self.referenceFrames
    self._insar.secondaryFrames = self.secondaryFrames
    self._insar.startingSwath = self.startingSwath
    self._insar.endingSwath = self.endingSwath


    ##################################################
    #1. create directories and read data
    ##################################################
    self.reference.configure()
    self.secondary.configure()
    self.reference.track.configure()
    self.secondary.track.configure()
    for i, (referenceFrame, secondaryFrame) in enumerate(zip(self._insar.referenceFrames, self._insar.secondaryFrames)):
        #frame number starts with 1
        frameDir = 'f{}_{}'.format(i+1, referenceFrame)
        os.makedirs(frameDir, exist_ok=True)
        os.chdir(frameDir)

        #attach a frame to reference and secondary
        frameObjReference = MultiMode.createFrame()
        frameObjSecondary = MultiMode.createFrame()
        frameObjReference.configure()
        frameObjSecondary.configure()
        self.reference.track.frames.append(frameObjReference)
        self.secondary.track.frames.append(frameObjSecondary)

        #swath number starts with 1
        for j in range(self._insar.startingSwath, self._insar.endingSwath+1):
            print('processing frame {} swath {}'.format(referenceFrame, j))

            swathDir = 's{}'.format(j)
            os.makedirs(swathDir, exist_ok=True)
            os.chdir(swathDir)

            #attach a swath to reference and secondary
            swathObjReference = MultiMode.createSwath()
            swathObjSecondary = MultiMode.createSwath()
            swathObjReference.configure()
            swathObjSecondary.configure()
            self.reference.track.frames[-1].swaths.append(swathObjReference)
            self.secondary.track.frames[-1].swaths.append(swathObjSecondary)

            #setup reference (ScanSAR image files carry a per-swath -F{j} suffix)
            self.reference.leaderFile = sorted(glob.glob(os.path.join(self.referenceDir, 'LED-ALOS2*{}-*-*'.format(referenceFrame))))[0]
            if referenceMode in scansarModes:
                self.reference.imageFile = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*{}-*-*-F{}'.format(self.referencePolarization.upper(), referenceFrame, j))))[0]
            else:
                self.reference.imageFile = sorted(glob.glob(os.path.join(self.referenceDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.referencePolarization.upper(), referenceFrame))))[0]
            self.reference.outputFile = self._insar.referenceSlc
            self.reference.useVirtualFile = self.useVirtualFile
            #read reference
            (imageFDR, imageData)=self.reference.readImage()
            (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord)=self.reference.readLeader()
            self.reference.setSwath(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData)
            self.reference.setFrame(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData)
            self.reference.setTrack(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData)

            #setup secondary
            self.secondary.leaderFile = sorted(glob.glob(os.path.join(self.secondaryDir, 'LED-ALOS2*{}-*-*'.format(secondaryFrame))))[0]
            if secondaryMode in scansarModes:
                self.secondary.imageFile = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*{}-*-*-F{}'.format(self.secondaryPolarization.upper(), secondaryFrame, j))))[0]
            else:
                self.secondary.imageFile = sorted(glob.glob(os.path.join(self.secondaryDir, 'IMG-{}-ALOS2*{}-*-*'.format(self.secondaryPolarization.upper(), secondaryFrame))))[0]
            self.secondary.outputFile = self._insar.secondarySlc
            self.secondary.useVirtualFile = self.useVirtualFile
            #read secondary
            (imageFDR, imageData)=self.secondary.readImage()
            (leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord)=self.secondary.readLeader()
            self.secondary.setSwath(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData)
            self.secondary.setFrame(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData)
            self.secondary.setTrack(leaderFDR, sceneHeaderRecord, platformPositionRecord, facilityRecord, imageFDR, imageData)

            #leave swath directory
            os.chdir('../')
        self._insar.saveProduct(self.reference.track.frames[-1], self._insar.referenceFrameParameter)
        self._insar.saveProduct(self.secondary.track.frames[-1], self._insar.secondaryFrameParameter)
        #leave frame directory
        os.chdir('../')
    self._insar.saveProduct(self.reference.track, self._insar.referenceTrackParameter)
    self._insar.saveProduct(self.secondary.track, self._insar.secondaryTrackParameter)


    catalog.printToLog(logger, "runPreprocessor")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
def check_overlap(ldr_m, img_m, ldr_s, img_s):
    '''Compute the slant-range overlap ratio between two ALOS-2 images.

    ldr_m, img_m: leader and image file of the reference (master) image
    ldr_s, img_s: leader and image file of the secondary (slave) image

    Returns the overlap extent as a fraction of the reference image's
    slant-range extent (0.0 when the two ranges do not intersect).
    '''
    from isceobj.Constants import SPEED_OF_LIGHT

    rangeSamplingRateReference, widthReference, nearRangeReference = read_param_for_checking_overlap(ldr_m, img_m)
    rangeSamplingRateSecondary, widthSecondary, nearRangeSecondary = read_param_for_checking_overlap(ldr_s, img_s)

    #slant-range pixel size is c / (2 * rangeSamplingRate)
    farRangeReference = nearRangeReference + (widthReference-1) * 0.5 * SPEED_OF_LIGHT / rangeSamplingRateReference
    farRangeSecondary = nearRangeSecondary + (widthSecondary-1) * 0.5 * SPEED_OF_LIGHT / rangeSamplingRateSecondary

    #This should be good enough, although precise image offsets are not used.
    if farRangeReference <= nearRangeSecondary:
        overlapRatio = 0.0
    elif farRangeSecondary <= nearRangeReference:
        overlapRatio = 0.0
    else:
        #                  0                   1                   2                  3
        ranges = np.array([nearRangeReference, farRangeReference, nearRangeSecondary, farRangeSecondary])
        rangesIndex = np.argsort(ranges)
        #overlap = span between the two middle values of the four sorted range
        #bounds. BUGFIX: the numerator must be parenthesized; without the
        #parentheses, operator precedence divided only the second term,
        #yielding a meaningless (and typically huge) ratio.
        overlapRatio = (ranges[rangesIndex[2]]-ranges[rangesIndex[1]]) / (farRangeReference-nearRangeReference)

    return overlapRatio
|
||||
|
||||
|
||||
def read_param_for_checking_overlap(leader_file, image_file):
    '''Read minimal geometry parameters from ALOS-2 CEOS files.

    leader_file: CEOS leader file (LED-*) of the image
    image_file:  CEOS image file (IMG-*) of the image

    Returns (rangeSamplingRate [Hz], width [samples], near_range [m]).
    '''
    from isceobj.Sensor import xmlPrefix
    import isceobj.Sensor.CEOS as CEOS

    #read from leader file
    #lookup table: 'Range sampling rate in MHz' field -> exact rate in Hz
    fsampConst = { 104: 1.047915957140240E+08,
                    52: 5.239579785701190E+07,
                    34: 3.493053190467460E+07,
                    17: 1.746526595233730E+07 }

    fp = open(leader_file,'rb')
    leaderFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/leader_file.xml'),dataFile=fp)
    leaderFDR.parse()
    fp.seek(leaderFDR.getEndOfRecordPosition())
    #scene header record follows the file descriptor record
    sceneHeaderRecord = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/scene_record.xml'),dataFile=fp)
    sceneHeaderRecord.parse()
    fp.seek(sceneHeaderRecord.getEndOfRecordPosition())

    fsamplookup = int(sceneHeaderRecord.metadata['Range sampling rate in MHz'])
    rangeSamplingRate = fsampConst[fsamplookup]
    fp.close()
    #print('{}'.format(rangeSamplingRate))

    #read from image file
    fp = open(image_file, 'rb')
    imageFDR = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/image_file.xml'), dataFile=fp)
    imageFDR.parse()
    fp.seek(imageFDR.getEndOfRecordPosition())
    #only the first signal data record is needed; parseFast skips unused fields
    imageData = CEOS.CEOSDB(xml=os.path.join(xmlPrefix,'alos2_slc/image_record.xml'), dataFile=fp)
    imageData.parseFast()

    width = imageFDR.metadata['Number of pixels per line per SAR channel']
    near_range = imageData.metadata['Slant range to 1st data sample']
    fp.close()
    #print('{}'.format(width))
    #print('{}'.format(near_range))

    return (rangeSamplingRate, width, near_range)
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,233 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runRdr2Geo')
|
||||
|
||||
def runRdr2Geo(self):
    '''compute lat/lon/hgt
    '''
    #skip the whole geometry step when InSAR processing is turned off
    if hasattr(self, 'doInSAR') and not self.doInSAR:
        return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    demFile = os.path.abspath(self._insar.dem)
    wbdFile = os.path.abspath(self._insar.wbd)

    #all geometry products are written under the insar directory
    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    #same argument list for both backends; GPU is used when available
    topoArgs = (referenceTrack,
                self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
                demFile,
                self._insar.latitude, self._insar.longitude,
                self._insar.height, self._insar.los)
    if self.useGPU and self._insar.hasGPU():
        topoGPU(*topoArgs)
    else:
        topoCPU(*topoArgs)
    #derive the radar-coordinate water-body mask from the computed lat/lon
    waterBodyRadar(self._insar.latitude, self._insar.longitude, wbdFile, self._insar.wbdOut)

    os.chdir('../')

    catalog.printToLog(logger, "runRdr2Geo")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def topoCPU(referenceTrack, numberRangeLooks, numberAzimuthLooks, demFile, latFile, lonFile, hgtFile, losFile):
    '''Run CPU topozero to map each radar pixel to lat/lon/height/LOS.

    referenceTrack:      track object providing orbit and geometry
    numberRangeLooks:    number of range looks of the multilooked geometry
    numberAzimuthLooks:  number of azimuth looks of the multilooked geometry
    demFile:             DEM file (with .xml metadata) to interpolate
    latFile/lonFile/hgtFile/losFile: output file names

    Returns the processed area bounding box as a list [s, n, w, e].
    '''
    import datetime
    import isceobj
    from zerodop.topozero import createTopozero
    from isceobj.Planet.Planet import Planet

    pointingDirection = {'right': -1, 'left' :1}

    demImage = isceobj.createDemImage()
    demImage.load(demFile + '.xml')
    demImage.setAccessMode('read')

    planet = Planet(pname='Earth')

    #multilooking is folded into the pixel spacing/PRF below, so topo itself
    #runs with 1x1 looks
    topo = createTopozero()
    topo.slantRangePixelSpacing = numberRangeLooks * referenceTrack.rangePixelSize
    topo.prf = 1.0 / (numberAzimuthLooks*referenceTrack.azimuthLineInterval)
    topo.radarWavelength = referenceTrack.radarWavelength
    topo.orbit = referenceTrack.orbit
    topo.width = referenceTrack.numberOfSamples
    topo.length = referenceTrack.numberOfLines
    topo.wireInputPort(name='dem', object=demImage)
    topo.wireInputPort(name='planet', object=planet)
    topo.numberRangeLooks = 1 #must be set as 1
    topo.numberAzimuthLooks = 1 #must be set as 1 Cunren
    topo.lookSide = pointingDirection[referenceTrack.pointingDirection]
    #start at the center of the first multilooked pixel
    topo.sensingStart = referenceTrack.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*referenceTrack.azimuthLineInterval)
    topo.rangeFirstSample = referenceTrack.startingRange + (numberRangeLooks-1.0)/2.0*referenceTrack.rangePixelSize
    topo.demInterpolationMethod='BIQUINTIC'

    topo.latFilename = latFile
    topo.lonFilename = lonFile
    topo.heightFilename = hgtFile
    topo.losFilename = losFile
    #topo.incFilename = incName
    #topo.maskFilename = mskName

    topo.topo()

    return list(topo.snwe)
|
||||
|
||||
|
||||
def topoGPU(referenceTrack, numberRangeLooks, numberAzimuthLooks, demFile, latFile, lonFile, hgtFile, losFile):
    '''
    Try with GPU module.

    GPU counterpart of topoCPU: maps each radar pixel to lat/lon/height/LOS
    using PyTopozero. Same arguments as topoCPU; outputs are written to the
    given file names. Returns None.
    '''
    import datetime
    import numpy as np
    from isceobj.Planet.Planet import Planet
    from zerodop.GPUtopozero.GPUtopozero import PyTopozero
    from isceobj.Util.Poly2D import Poly2D
    from iscesys import DateTimeUtil as DTU

    pointingDirection = {'right': -1, 'left' :1}

    #create polynomials
    #zero-doppler geometry: a constant zero doppler polynomial
    polyDoppler = Poly2D(name='topsApp_dopplerPoly')
    polyDoppler.setWidth(referenceTrack.numberOfSamples)
    polyDoppler.setLength(referenceTrack.numberOfLines)
    polyDoppler.setNormRange(1.0)
    polyDoppler.setNormAzimuth(1.0)
    polyDoppler.setMeanRange(0.0)
    polyDoppler.setMeanAzimuth(0.0)
    polyDoppler.initPoly(rangeOrder=0,azimuthOrder=0, coeffs=[[0.]])
    polyDoppler.createPoly2D()

    #linear slant-range polynomial: first multilooked-pixel center + per-pixel spacing
    slantRangeImage = Poly2D()
    slantRangeImage.setWidth(referenceTrack.numberOfSamples)
    slantRangeImage.setLength(referenceTrack.numberOfLines)
    slantRangeImage.setNormRange(1.0)
    slantRangeImage.setNormAzimuth(1.0)
    slantRangeImage.setMeanRange(0.)
    slantRangeImage.setMeanAzimuth(0.)
    slantRangeImage.initPoly(rangeOrder=1,azimuthOrder=0,
        coeffs=[[referenceTrack.startingRange + (numberRangeLooks-1.0)/2.0*referenceTrack.rangePixelSize,numberRangeLooks * referenceTrack.rangePixelSize]])
    slantRangeImage.createPoly2D()

    #create images
    latImage = isceobj.createImage()
    latImage.initImage(latFile, 'write', referenceTrack.numberOfSamples, 'DOUBLE')
    latImage.createImage()

    lonImage = isceobj.createImage()
    lonImage.initImage(lonFile, 'write', referenceTrack.numberOfSamples, 'DOUBLE')
    lonImage.createImage()

    #LOS: two-band BIL (incidence angle, azimuth angle)
    losImage = isceobj.createImage()
    losImage.initImage(losFile, 'write', referenceTrack.numberOfSamples, 'FLOAT', bands=2, scheme='BIL')
    losImage.setCaster('write', 'DOUBLE')
    losImage.createImage()

    heightImage = isceobj.createImage()
    heightImage.initImage(hgtFile, 'write', referenceTrack.numberOfSamples, 'DOUBLE')
    heightImage.createImage()

    demImage = isceobj.createDemImage()
    demImage.load(demFile + '.xml')
    demImage.setCaster('read', 'FLOAT')
    demImage.createImage()

    #compute a few things
    #t0 is the center of the first multilooked azimuth line
    t0 = referenceTrack.sensingStart + datetime.timedelta(seconds=(numberAzimuthLooks-1.0)/2.0*referenceTrack.azimuthLineInterval)
    orb = referenceTrack.orbit
    pegHdg = np.radians( orb.getENUHeading(t0))
    elp = Planet(pname='Earth').ellipsoid

    #call gpu topo
    topo = PyTopozero()
    topo.set_firstlat(demImage.getFirstLatitude())
    topo.set_firstlon(demImage.getFirstLongitude())
    topo.set_deltalat(demImage.getDeltaLatitude())
    topo.set_deltalon(demImage.getDeltaLongitude())
    topo.set_major(elp.a)
    topo.set_eccentricitySquared(elp.e2)
    topo.set_rSpace(numberRangeLooks * referenceTrack.rangePixelSize)
    topo.set_r0(referenceTrack.startingRange + (numberRangeLooks-1.0)/2.0*referenceTrack.rangePixelSize)
    topo.set_pegHdg(pegHdg)
    topo.set_prf(1.0 / (numberAzimuthLooks*referenceTrack.azimuthLineInterval))
    topo.set_t0(DTU.seconds_since_midnight(t0))
    topo.set_wvl(referenceTrack.radarWavelength)
    topo.set_thresh(.05)
    topo.set_demAccessor(demImage.getImagePointer())
    topo.set_dopAccessor(polyDoppler.getPointer())
    topo.set_slrngAccessor(slantRangeImage.getPointer())
    topo.set_latAccessor(latImage.getImagePointer())
    topo.set_lonAccessor(lonImage.getImagePointer())
    topo.set_losAccessor(losImage.getImagePointer())
    topo.set_heightAccessor(heightImage.getImagePointer())
    #incidence and mask outputs are disabled (null accessors)
    topo.set_incAccessor(0)
    topo.set_maskAccessor(0)
    topo.set_numIter(25)
    topo.set_idemWidth(demImage.getWidth())
    topo.set_idemLength(demImage.getLength())
    topo.set_ilrl(pointingDirection[referenceTrack.pointingDirection])
    topo.set_extraIter(10)
    topo.set_length(referenceTrack.numberOfLines)
    topo.set_width(referenceTrack.numberOfSamples)
    topo.set_nRngLooks(1)
    topo.set_nAzLooks(1)
    topo.set_demMethod(5) # BIQUINTIC METHOD
    topo.set_orbitMethod(0) # HERMITE

    # Need to simplify orbit stuff later
    nvecs = len(orb._stateVectors)
    topo.set_orbitNvecs(nvecs)
    topo.set_orbitBasis(1) # Is this ever different?
    topo.createOrbit() # Initializes the empty orbit to the right allocated size
    count = 0
    for sv in orb._stateVectors:
        #times are seconds since midnight to match set_t0 above
        td = DTU.seconds_since_midnight(sv.getTime())
        pos = sv.getPosition()
        vel = sv.getVelocity()
        topo.set_orbitVector(count,td,pos[0],pos[1],pos[2],vel[0],vel[1],vel[2])
        count += 1

    topo.runTopo()

    #tidy up
    latImage.addDescription('Pixel-by-pixel latitude in degrees.')
    latImage.finalizeImage()
    latImage.renderHdr()

    lonImage.addDescription('Pixel-by-pixel longitude in degrees.')
    lonImage.finalizeImage()
    lonImage.renderHdr()

    heightImage.addDescription('Pixel-by-pixel height in meters.')
    heightImage.finalizeImage()
    heightImage.renderHdr()

    descr = '''Two channel Line-Of-Sight geometry image (all angles in degrees). Represents vector drawn from target to platform.
            Channel 1: Incidence angle measured from vertical at target (always +ve).
            Channel 2: Azimuth angle measured from North in Anti-clockwise direction.'''
    losImage.setImageType('bil')
    losImage.addDescription(descr)
    losImage.finalizeImage()
    losImage.renderHdr()

    demImage.finalizeImage()

    #slant-range polynomial "image" may not support finalize; ignore failures
    if slantRangeImage:
        try:
            slantRangeImage.finalizeImage()
        except:
            pass
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,340 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from mroipac.ampcor.Ampcor import Ampcor
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import runCmd
|
||||
from contrib.alos2proc.alos2proc import look
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import writeOffset
|
||||
from contrib.alos2proc_f.alos2proc_f import fitoff
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runRdrDemOffset')
|
||||
|
||||
def runRdrDemOffset(self):
    """Driver step: estimate offsets between the radar image and the DEM.

    Loads the reference track and delegates the actual matching work to
    rdrDemOffset(); progress and warnings are recorded in a processing
    catalog attached to the procDoc.
    """
    # Honor an explicit doInSAR=False switch; when the attribute is absent
    # we proceed (identical to the original hasattr + nested-if check).
    if not getattr(self, 'doInSAR', True):
        return

    procCatalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)

    rdrDemOffset(self, referenceTrack, catalog=procCatalog)
||||
|
||||
def rdrDemOffset(self, referenceTrack, catalog=None):
    """Estimate the affine transform between the radar image and the DEM.

    Simulates a radar amplitude image from the DEM (shaded relief), takes
    looks so that radar and DEM pixels have comparable sizes, cross-correlates
    the real multi-looked amplitude against the simulation with Ampcor, culls
    the offsets with fitoff, and stores the resulting 2x3 affine transform in
    self._insar.radarDemAffineTransform ([m11, m12, m21, m22, t1, t2]).

    Side effects: changes into insar/rdr_dem_offset/ and back out two levels;
    writes simulation, look and offset files there.  If catalog is given,
    messages are logged and flushed into self._insar.procDoc.
    """

    demFile = os.path.abspath(self._insar.dem)

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    rdrDemDir = 'rdr_dem_offset'
    os.makedirs(rdrDemDir, exist_ok=True)
    os.chdir(rdrDemDir)

    ##################################################################################################
    #compute dem pixel size
    demImage = isceobj.createDemImage()
    demImage.load(demFile + '.xml')
    #DEM pixel size in meters (appoximate value)
    #1 arc-second = 0.0002777... degree corresponds to ~30 m on the ground,
    #so deg-per-pixel / deg-per-arcsec * 30 m gives the metric pixel size.
    demDeltaLon = abs(demImage.getDeltaLongitude()) / 0.0002777777777777778 * 30.0
    demDeltaLat = abs(demImage.getDeltaLatitude()) / 0.0002777777777777778 * 30.0

    #number of looks to take in range
    # NOTE(review): PEP 8 prefers "is None" over "== None" here and below.
    if self._insar.numberRangeLooksSim == None:
        if self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize > demDeltaLon:
            self._insar.numberRangeLooksSim = 1
        else:
            #round to the number of looks that best matches the DEM pixel size
            self._insar.numberRangeLooksSim = int(demDeltaLon / (self._insar.numberRangeLooks1 * referenceTrack.rangePixelSize) + 0.5)
    #number of looks to take in azimuth
    if self._insar.numberAzimuthLooksSim == None:
        if self._insar.numberAzimuthLooks1 * referenceTrack.azimuthPixelSize > demDeltaLat:
            self._insar.numberAzimuthLooksSim = 1
        else:
            self._insar.numberAzimuthLooksSim = int(demDeltaLat / (self._insar.numberAzimuthLooks1 * referenceTrack.azimuthPixelSize) + 0.5)

    #simulate a radar image using dem (shaded relief of the height file)
    simulateRadar(os.path.join('../', self._insar.height), self._insar.sim, scale=3.0, offset=100.0)
    sim = isceobj.createImage()
    sim.load(self._insar.sim+'.xml')

    #take looks
    if (self._insar.numberRangeLooksSim == 1) and (self._insar.numberAzimuthLooksSim == 1):
        #no extra looks needed: use the simulation as-is and only convert the
        #complex amplitude into a single-channel float magnitude image.
        simLookFile = self._insar.sim
        ampLookFile = 'amp_{}rlks_{}alks.float'.format(self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1,
            self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1)
        cmd = "imageMath.py -e='sqrt(a_0*a_0+a_1*a_1)' --a={} -o {} -t float".format(os.path.join('../', self._insar.amplitude), ampLookFile)
        runCmd(cmd)
    else:
        simLookFile = 'sim_{}rlks_{}alks.float'.format(self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1,
            self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1)
        ampLookFile = 'amp_{}rlks_{}alks.float'.format(self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1,
            self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1)
        ampTmpFile = 'amp_tmp.float'
        #look() data-type codes: 2 = float, 4 = two-band amplitude;
        #assumption based on call pattern — confirm against alos2proc docs.
        look(self._insar.sim, simLookFile, sim.width, self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, 2, 0, 1)
        look(os.path.join('../', self._insar.amplitude), ampTmpFile, sim.width, self._insar.numberRangeLooksSim, self._insar.numberAzimuthLooksSim, 4, 1, 1)

        width = int(sim.width/self._insar.numberRangeLooksSim)
        length = int(sim.length/self._insar.numberAzimuthLooksSim)
        create_xml(simLookFile, width, length, 'float')
        create_xml(ampTmpFile, width, length, 'amp')

        #convert the two-band looked amplitude into a single float magnitude
        cmd = "imageMath.py -e='sqrt(a_0*a_0+a_1*a_1)' --a={} -o {} -t float".format(ampTmpFile, ampLookFile)
        runCmd(cmd)
        os.remove(ampTmpFile)
        os.remove(ampTmpFile+'.vrt')
        os.remove(ampTmpFile+'.xml')

    #initial number of offsets to use
    numberOfOffsets = 800
    #compute land ratio to further determine the number of offsets to use
    #sample every 10th pixel of the water-body mask; -1 marks water
    wbd=np.memmap(os.path.join('../', self._insar.wbdOut), dtype='byte', mode='r', shape=(sim.length, sim.width))
    landRatio = np.sum(wbd[0:sim.length:10, 0:sim.width:10]!=-1) / int(sim.length/10) / int(sim.width/10)
    del wbd
    if (landRatio <= 0.00125):
        #almost no land: matching would be meaningless, fall back to identity
        print('\n\nWARNING: land area too small for estimating offsets between radar and dem')
        print('do not estimate offsets between radar and dem\n\n')
        if catalog is not None:
            # NOTE(review): unlike the culling fallback further below, the
            # identity transform is only assigned when a catalog is provided
            # here — confirm this guard is intended.
            self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]
            catalog.addItem('warning message', 'land area too small for estimating offsets between radar and dem', 'runRdrDemOffset')

        os.chdir('../../')

        if catalog is not None:
            catalog.printToLog(logger, "runRdrDemOffset")
            self._insar.procDoc.addAllFromCatalog(catalog)

        return

    #total number of offsets to use
    #scale up so that ~800 offsets land on land pixels (result becomes float)
    numberOfOffsets /= landRatio
    #allocate number of offsets in range/azimuth according to image width/length
    width = int(sim.width/self._insar.numberRangeLooksSim)
    length = int(sim.length/self._insar.numberAzimuthLooksSim)
    #number of offsets to use in range/azimuth
    numberOfOffsetsRange = int(np.sqrt(numberOfOffsets * width / length))
    numberOfOffsetsAzimuth = int(length / width * np.sqrt(numberOfOffsets * width / length))

    #this should be better?
    #NOTE: the equal-split below intentionally overrides the aspect-ratio
    #allocation just computed above.
    numberOfOffsetsRange = int(np.sqrt(numberOfOffsets))
    numberOfOffsetsAzimuth = int(np.sqrt(numberOfOffsets))

    #clamp to at most half the image size and at least 10 locations per axis
    if numberOfOffsetsRange > int(width/2):
        numberOfOffsetsRange = int(width/2)
    if numberOfOffsetsAzimuth > int(length/2):
        numberOfOffsetsAzimuth = int(length/2)

    if numberOfOffsetsRange < 10:
        numberOfOffsetsRange = 10
    if numberOfOffsetsAzimuth < 10:
        numberOfOffsetsAzimuth = 10

    if catalog is not None:
        catalog.addItem('number of range offsets', '{}'.format(numberOfOffsetsRange), 'runRdrDemOffset')
        catalog.addItem('number of azimuth offsets', '{}'.format(numberOfOffsetsAzimuth), 'runRdrDemOffset')

    #matching: real amplitude (reference) vs DEM simulation (secondary)
    ampcor = Ampcor(name='insarapp_slcs_ampcor')
    ampcor.configure()

    mMag = isceobj.createImage()
    mMag.load(ampLookFile+'.xml')
    mMag.setAccessMode('read')
    mMag.createImage()

    sMag = isceobj.createImage()
    sMag.load(simLookFile+'.xml')
    sMag.setAccessMode('read')
    sMag.createImage()

    #both inputs are single-band float magnitude images
    ampcor.setImageDataType1('real')
    ampcor.setImageDataType2('real')

    ampcor.setReferenceSlcImage(mMag)
    ampcor.setSecondarySlcImage(sMag)

    #MATCH REGION
    rgoff = 0
    azoff = 0
    #it seems that we cannot use 0, haven't look into the problem
    if rgoff == 0:
        rgoff = 1
    if azoff == 0:
        azoff = 1
    firstSample = 1
    if rgoff < 0:
        #shift the start so windows stay inside the secondary image
        firstSample = int(35 - rgoff)
    firstLine = 1
    if azoff < 0:
        firstLine = int(35 - azoff)
    ampcor.setAcrossGrossOffset(rgoff)
    ampcor.setDownGrossOffset(azoff)
    ampcor.setFirstSampleAcross(firstSample)
    ampcor.setLastSampleAcross(width)
    ampcor.setNumberLocationAcross(numberOfOffsetsRange)
    ampcor.setFirstSampleDown(firstLine)
    ampcor.setLastSampleDown(length)
    ampcor.setNumberLocationDown(numberOfOffsetsAzimuth)

    #MATCH PARAMETERS
    ampcor.setWindowSizeWidth(64)
    ampcor.setWindowSizeHeight(64)
    #note this is the half width/length of search area, so number of resulting correlation samples: 8*2+1
    ampcor.setSearchWindowSizeWidth(16)
    ampcor.setSearchWindowSizeHeight(16)

    #REST OF THE STUFF
    ampcor.setAcrossLooks(1)
    ampcor.setDownLooks(1)
    ampcor.setOversamplingFactor(64)
    ampcor.setZoomWindowSize(16)
    #1. The following not set
    #Matching Scale for Sample/Line Directions (-) = 1. 1.
    #should add the following in Ampcor.py?
    #if not set, in this case, Ampcor.py'value is also 1. 1.
    #ampcor.setScaleFactorX(1.)
    #ampcor.setScaleFactorY(1.)

    #MATCH THRESHOLDS AND DEBUG DATA
    #2. The following not set
    #in roi_pac the value is set to 0 1
    #in isce the value is set to 0.001 1000.0
    #SNR and Covariance Thresholds (-) = {s1} {s2}
    #should add the following in Ampcor?
    #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC
    #ampcor.setThresholdSNR(0)
    #ampcor.setThresholdCov(1)
    ampcor.setDebugFlag(False)
    ampcor.setDisplayFlag(False)

    #in summary, only two things not set which are indicated by 'The following not set' above.

    #run ampcor
    ampcor.ampcor()
    offsets = ampcor.getOffsetField()
    ampcorOffsetFile = 'ampcor.off'
    cullOffsetFile = 'cull.off'
    affineTransformFile = 'affine_transform.txt'
    writeOffset(offsets, ampcorOffsetFile)

    #finalize image, and re-create it
    #otherwise the file pointer is still at the end of the image
    mMag.finalizeImage()
    sMag.finalizeImage()

    #cull offsets
    #fitoff is compiled (Fortran) code whose report goes to the process-level
    #stdout, which contextlib.redirect_stdout cannot capture — presumably why
    #it is run in a child interpreter whose stdout is piped instead.
    import subprocess
    proc = subprocess.Popen(["python3", "-c", "import isce; from contrib.alos2proc_f.alos2proc_f import fitoff; fitoff('ampcor.off', 'cull.off', 1.5, .5, 50)"], stdout=subprocess.PIPE)
    out = proc.communicate()[0]
    with open(affineTransformFile, 'w') as f:
        f.write(out.decode('utf-8'))

    #check number of offsets left
    with open(cullOffsetFile, 'r') as f:
        numCullOffsets = sum(1 for linex in f)
    if numCullOffsets < 50:
        #too few reliable matches: fall back to the identity transform
        print('\n\nWARNING: too few points left after culling, {} left'.format(numCullOffsets))
        print('do not estimate offsets between radar and dem\n\n')
        self._insar.radarDemAffineTransform = [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]
        if catalog is not None:
            catalog.addItem('warning message', 'too few points left after culling, {} left'.format(numCullOffsets), 'runRdrDemOffset')

        os.chdir('../../')

        if catalog is not None:
            catalog.printToLog(logger, "runRdrDemOffset")
            self._insar.procDoc.addAllFromCatalog(catalog)

        return

    #read affine transform parameters
    #parse the fixed-layout report written by fitoff: the 2x2 matrix follows
    #the 'Affine Matrix' header by 2-3 lines, the translation by 7 lines.
    with open(affineTransformFile) as f:
        lines = f.readlines()
    i = 0
    for linex in lines:
        if 'Affine Matrix ' in linex:
            m11 = float(lines[i + 2].split()[0])
            m12 = float(lines[i + 2].split()[1])
            m21 = float(lines[i + 3].split()[0])
            m22 = float(lines[i + 3].split()[1])
            t1 = float(lines[i + 7].split()[0])
            t2 = float(lines[i + 7].split()[1])
            break
        i += 1

    self._insar.radarDemAffineTransform = [m11, m12, m21, m22, t1, t2]
    ##################################################################################################

    os.chdir('../../')

    if catalog is not None:
        catalog.printToLog(logger, "runRdrDemOffset")
        self._insar.procDoc.addAllFromCatalog(catalog)
||||
|
||||
|
||||
def simulateRadar(hgtfile, simfile, scale=3.0, offset=100.0):
    '''
    simulate a radar image by computing gradient of a dem image.

    Parameters
    ----------
    hgtfile : str
        Input height file (flat binary, read here as float64 samples;
        an ISCE .xml sidecar named hgtfile+'.xml' must exist).
    scale : float
        Multiplier applied to the range-direction height gradient.
    offset : float
        Constant brightness added to every simulated pixel.

    Writes simfile as float32 with the same width/length as the input and
    creates its ISCE .xml/.vrt metadata.  The last column of each output row
    stays 0 (np.diff yields width-1 samples).
    '''
    import numpy as np
    import isceobj
    from isceobj.Alos2Proc.Alos2ProcPublic import create_xml

    #set chunk length here for efficient processing
    ###############################################
    chunk_length = 1000
    ###############################################

    hgt = isceobj.createImage()
    hgt.load(hgtfile+'.xml')

    chunk_width = hgt.width
    num_chunk = int(hgt.length/chunk_length)
    #remaining lines after the full chunks
    chunk_length_last = hgt.length - num_chunk * chunk_length

    #reused output buffer; the last column is never written and remains 0
    simData = np.zeros((chunk_length, chunk_width), dtype=np.float32)

    hgtfp = open(hgtfile,'rb')
    simfp = open(simfile,'wb')

    print("simulating a radar image using topography")
    for i in range(num_chunk):
        print("processing chunk %6d of %6d" % (i+1, num_chunk), end='\r', flush=True)
        #sequential read: the file pointer advances chunk by chunk
        hgtData = np.fromfile(hgtfp, dtype=np.float64, count=chunk_length*chunk_width).reshape(chunk_length, chunk_width)
        #shaded relief: brightness proportional to the along-row slope
        simData[:, 0:chunk_width-1] = scale * np.diff(hgtData, axis=1) + offset
        simData.astype(np.float32).tofile(simfp)

    print("processing chunk %6d of %6d" % (num_chunk, num_chunk))
    if chunk_length_last != 0:
        #process the short final chunk the same way, writing only its rows
        hgtData = np.fromfile(hgtfp, dtype=np.float64, count=chunk_length_last*chunk_width).reshape(chunk_length_last, chunk_width)
        simData[0:chunk_length_last, 0:chunk_width-1] = scale * np.diff(hgtData, axis=1) + offset
        (simData[0:chunk_length_last, :]).astype(np.float32).tofile(simfp)

    hgtfp.close()
    simfp.close()
    create_xml(simfile, hgt.width, hgt.length, 'float')
|
||||
|
|
@ -0,0 +1,60 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
|
||||
import isceobj
|
||||
from contrib.alos2proc_f.alos2proc_f import rect_with_looks
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runRectRangeOffset')
|
||||
|
||||
def runRectRangeOffset(self):
    '''rectify range offset

    Applies the radar-to-DEM affine transform estimated earlier to the
    geometric range-offset file, producing the rectified range offset used by
    ionospheric correction.  When the transform is the identity, the input is
    simply symlinked instead of resampled.  Works inside the insar/ directory
    and logs through the processing catalog.
    '''
    # Honor an explicit doInSAR=False switch (attribute may be absent).
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    # NOTE(review): secondaryTrack is loaded but not used in this step.
    secondaryTrack = self._insar.loadTrack(reference=False)

    insarDir = 'insar'
    os.makedirs(insarDir, exist_ok=True)
    os.chdir(insarDir)

    #rectify
    rgoff = isceobj.createImage()
    rgoff.load(self._insar.rangeOffset+'.xml')

    if self._insar.radarDemAffineTransform == [1.0, 0.0, 0.0, 1.0, 0.0, 0.0]:
        #identity transform: no resampling needed, just link the input
        if not os.path.isfile(self._insar.rectRangeOffset):
            os.symlink(self._insar.rangeOffset, self._insar.rectRangeOffset)
            create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float')
    else:
        #resample through the affine transform; look factors relate the
        #geometry in which the transform was estimated (sim looks) to the
        #geometry of the offset file (looks 1)
        rect_with_looks(self._insar.rangeOffset,
                        self._insar.rectRangeOffset,
                        rgoff.width, rgoff.length,
                        rgoff.width, rgoff.length,
                        self._insar.radarDemAffineTransform[0], self._insar.radarDemAffineTransform[1],
                        self._insar.radarDemAffineTransform[2], self._insar.radarDemAffineTransform[3],
                        self._insar.radarDemAffineTransform[4], self._insar.radarDemAffineTransform[5],
                        self._insar.numberRangeLooksSim*self._insar.numberRangeLooks1, self._insar.numberAzimuthLooksSim*self._insar.numberAzimuthLooks1,
                        self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1,
                        'REAL',
                        'Bilinear')
        create_xml(self._insar.rectRangeOffset, rgoff.width, rgoff.length, 'float')

    os.chdir('../')

    catalog.printToLog(logger, "runRectRangeOffset")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,272 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.runRdr2Geo import topoCPU
|
||||
from isceobj.Alos2Proc.runRdr2Geo import topoGPU
|
||||
from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrCPU
|
||||
from isceobj.Alos2Proc.runGeo2Rdr import geo2RdrGPU
|
||||
from contrib.alos2proc.alos2proc import resamp
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import renameFile
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar
|
||||
from mroipac.ampcor.Ampcor import Ampcor
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import meanOffset
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsetsRoipac
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runSlcMatch')
|
||||
|
||||
def runSlcMatch(self):
    '''match a pair of SLCs

    Coregisters the secondary SLC to the reference geometrically (topo +
    geo2rdr + resamp).  Optionally (self.estimateResidualOffset) estimates a
    residual constant offset by cross-correlating the two SLCs with Ampcor,
    culls the offsets, and re-resamples the secondary with the mean residual
    applied.  Only runs for dense-offset processing of spotlight-spotlight or
    stripmap-stripmap pairs (modeCombination 0 or 1).  Works inside the
    dense_offset/ directory.
    '''
    if not self.doDenseOffset:
        return
    if not ((self._insar.modeCombination == 0) or (self._insar.modeCombination == 1)):
        return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    demFile = os.path.abspath(self._insar.dem)
    wbdFile = os.path.abspath(self._insar.wbd)

    denseOffsetDir = 'dense_offset'
    os.makedirs(denseOffsetDir, exist_ok=True)
    os.chdir(denseOffsetDir)

    referenceTrack = self._insar.loadProduct(self._insar.referenceTrackParameter)
    secondaryTrack = self._insar.loadProduct(self._insar.secondaryTrackParameter)

    #########################################################################################


    ##################################################
    # compute geometric offsets
    ##################################################
    #single-look geometry (range looks = azimuth looks = 1)
    if self.useGPU and self._insar.hasGPU():
        topoGPU(referenceTrack, 1, 1, demFile,
            'lat.rdr', 'lon.rdr', 'hgt.rdr', 'los.rdr')
        geo2RdrGPU(secondaryTrack, 1, 1,
            'lat.rdr', 'lon.rdr', 'hgt.rdr', 'rg.off', 'az.off')
    else:
        topoCPU(referenceTrack, 1, 1, demFile,
            'lat.rdr', 'lon.rdr', 'hgt.rdr', 'los.rdr')
        geo2RdrCPU(secondaryTrack, 1, 1,
            'lat.rdr', 'lon.rdr', 'hgt.rdr', 'rg.off', 'az.off')


    ##################################################
    # resample SLC
    ##################################################
    #SecondarySlcResampled = os.path.splitext(self._insar.secondarySlc)[0]+'_resamp'+os.path.splitext(self._insar.secondarySlc)[1]
    SecondarySlcResampled = self._insar.secondarySlcCoregistered
    #no residual offsets yet: resample with the geometric offsets only
    rangeOffsets2Frac = 0.0
    azimuthOffsets2Frac = 0.0
    resamp(self._insar.secondarySlc,
           SecondarySlcResampled,
           'rg.off',
           'az.off',
           referenceTrack.numberOfSamples, referenceTrack.numberOfLines,
           secondaryTrack.prf,
           secondaryTrack.dopplerVsPixel,
           [rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
           [azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
    create_xml(SecondarySlcResampled, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'slc')


    if self.estimateResidualOffset:

        numberOfOffsets = 800
        rangeStep = 50

        length = referenceTrack.numberOfLines
        width = referenceTrack.numberOfSamples
        #sample the water-body mask to measure how much land is available
        waterBodyRadar('lat.rdr', 'lon.rdr', wbdFile, 'wbd.rdr')
        wbd=np.memmap('wbd.rdr', dtype=np.int8, mode='r', shape=(length, width))
        azimuthStep = int(length/width*rangeStep+0.5)
        #-1 marks water in the mask
        landRatio = np.sum(wbd[0:length:azimuthStep,0:width:rangeStep]!=-1)/(int(length/azimuthStep)*int(width/rangeStep))
        del wbd

        if (landRatio <= 0.00125):
            #essentially all water: skip residual estimation entirely
            print('\n\nWARNING: land area too small for estimating residual slc offsets')
            print('do not estimate residual offsets\n\n')
            catalog.addItem('warning message', 'land area too small for estimating residual slc offsets', 'runSlcMatch')
        else:
            #scale so that ~800 offsets fall on land (result becomes float)
            numberOfOffsets /= landRatio
            #we use equal number of offsets in range and azimuth here
            numberOfOffsetsRange = int(np.sqrt(numberOfOffsets)+0.5)
            numberOfOffsetsAzimuth = int(np.sqrt(numberOfOffsets)+0.5)
            #clamp to at most half the image size, at least 10 per axis
            if numberOfOffsetsRange > int(width/2):
                numberOfOffsetsRange = int(width/2)
            if numberOfOffsetsAzimuth > int(length/2):
                numberOfOffsetsAzimuth = int(length/2)
            if numberOfOffsetsRange < 10:
                numberOfOffsetsRange = 10
            if numberOfOffsetsAzimuth < 10:
                numberOfOffsetsAzimuth = 10


            ##########################################
            #2. match using ampcor
            ##########################################
            ampcor = Ampcor(name='insarapp_slcs_ampcor')
            ampcor.configure()

            mSLC = isceobj.createSlcImage()
            mSLC.load(self._insar.referenceSlc+'.xml')
            mSLC.setAccessMode('read')
            mSLC.createImage()

            sSLC = isceobj.createSlcImage()
            sSLC.load(SecondarySlcResampled+'.xml')
            sSLC.setAccessMode('read')
            sSLC.createImage()

            ampcor.setImageDataType1('complex')
            ampcor.setImageDataType2('complex')

            ampcor.setReferenceSlcImage(mSLC)
            ampcor.setSecondarySlcImage(sSLC)

            #MATCH REGION
            #compute an offset at image center to use
            #secondary is already geometrically coregistered, so the residual
            #gross offset is expected to be near zero
            rgoff = 0.0
            azoff = 0.0
            #it seems that we cannot use 0, haven't look into the problem
            if rgoff == 0:
                rgoff = 1
            if azoff == 0:
                azoff = 1
            firstSample = 1
            if rgoff < 0:
                firstSample = int(35 - rgoff)
            firstLine = 1
            if azoff < 0:
                firstLine = int(35 - azoff)
            ampcor.setAcrossGrossOffset(rgoff)
            ampcor.setDownGrossOffset(azoff)
            ampcor.setFirstSampleAcross(firstSample)
            ampcor.setLastSampleAcross(mSLC.width)
            ampcor.setNumberLocationAcross(numberOfOffsetsRange)
            ampcor.setFirstSampleDown(firstLine)
            ampcor.setLastSampleDown(mSLC.length)
            ampcor.setNumberLocationDown(numberOfOffsetsAzimuth)

            #MATCH PARAMETERS
            #full-aperture mode: longer azimuth windows + filtering settings
            if (self._insar.modeCombination == 21) or \
               (self._insar.modeCombination == 22) or \
               (self._insar.modeCombination == 31) or \
               (self._insar.modeCombination == 32):
                ampcor.setWindowSizeWidth(64)
                ampcor.setWindowSizeHeight(512)
                #note this is the half width/length of search area, number of resulting correlation samples: 32*2+1
                ampcor.setSearchWindowSizeWidth(32)
                ampcor.setSearchWindowSizeHeight(32)
                #triggering full-aperture mode matching
                ampcor.setWinsizeFilt(8)
                ampcor.setOversamplingFactorFilt(64)
            #regular mode
            else:
                ampcor.setWindowSizeWidth(64)
                ampcor.setWindowSizeHeight(64)
                ampcor.setSearchWindowSizeWidth(16)
                ampcor.setSearchWindowSizeHeight(16)

            #REST OF THE STUFF
            ampcor.setAcrossLooks(1)
            ampcor.setDownLooks(1)
            ampcor.setOversamplingFactor(64)
            ampcor.setZoomWindowSize(16)
            #1. The following not set
            #Matching Scale for Sample/Line Directions (-) = 1. 1.
            #should add the following in Ampcor.py?
            #if not set, in this case, Ampcor.py'value is also 1. 1.
            #ampcor.setScaleFactorX(1.)
            #ampcor.setScaleFactorY(1.)

            #MATCH THRESHOLDS AND DEBUG DATA
            #2. The following not set
            #in roi_pac the value is set to 0 1
            #in isce the value is set to 0.001 1000.0
            #SNR and Covariance Thresholds (-) = {s1} {s2}
            #should add the following in Ampcor?
            #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC
            #ampcor.setThresholdSNR(0)
            #ampcor.setThresholdCov(1)
            ampcor.setDebugFlag(False)
            ampcor.setDisplayFlag(False)

            #in summary, only two things not set which are indicated by 'The following not set' above.

            #run ampcor
            ampcor.ampcor()
            offsets = ampcor.getOffsetField()
            mSLC.finalizeImage()
            sSLC.finalizeImage()


            #3. cull offsets
            # NOTE(review): PEP 8 prefers "is None" over "== None" below.
            refinedOffsets = cullOffsetsRoipac(offsets, numThreshold=50)
            if refinedOffsets == None:
                print('\n\nWARNING: too few offsets left for slc residual offset estimation')
                print('do not estimate residual offsets\n\n')
                catalog.addItem('warning message', 'too few offsets left for slc residual offset estimation', 'runSlcMatch')
            else:
                rangeOffset, azimuthOffset = meanOffset(refinedOffsets)
                #redo the resampling with the constant residual applied;
                #remove the first-pass output before regenerating it
                os.remove(SecondarySlcResampled)
                os.remove(SecondarySlcResampled+'.vrt')
                os.remove(SecondarySlcResampled+'.xml')

                rangeOffsets2Frac = rangeOffset
                azimuthOffsets2Frac = azimuthOffset
                resamp(self._insar.secondarySlc,
                       SecondarySlcResampled,
                       'rg.off',
                       'az.off',
                       referenceTrack.numberOfSamples, referenceTrack.numberOfLines,
                       secondaryTrack.prf,
                       secondaryTrack.dopplerVsPixel,
                       [rangeOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0],
                       [azimuthOffsets2Frac, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0])
                create_xml(SecondarySlcResampled, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'slc')

                catalog.addItem('number of offsets range', numberOfOffsetsRange, 'runSlcMatch')
                catalog.addItem('number of offsets azimuth', numberOfOffsetsAzimuth, 'runSlcMatch')
                catalog.addItem('range residual offset after geometric coregistration', rangeOffset, 'runSlcMatch')
                catalog.addItem('azimuth residual offset after geometric coregistration', azimuthOffset, 'runSlcMatch')




    if self.deleteGeometryFiles:
        os.remove('lat.rdr')
        os.remove('lat.rdr.vrt')
        os.remove('lat.rdr.xml')
        os.remove('lon.rdr')
        os.remove('lon.rdr.vrt')
        os.remove('lon.rdr.xml')
        os.remove('hgt.rdr')
        os.remove('hgt.rdr.vrt')
        os.remove('hgt.rdr.xml')
        os.remove('los.rdr')
        os.remove('los.rdr.vrt')
        os.remove('los.rdr.xml')
        # if os.path.isfile('wbd.rdr'):
        #     os.remove('wbd.rdr')
        #     os.remove('wbd.rdr.vrt')
        #     os.remove('wbd.rdr.xml')

    #########################################################################################

    os.chdir('../')
    catalog.printToLog(logger, "runSlcMatch")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,235 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import logging
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.runFrameOffset import frameOffset
|
||||
from isceobj.Alos2Proc.runFrameMosaic import frameMosaic
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runSlcMosaic')
|
||||
|
||||
def runSlcMosaic(self):
|
||||
'''mosaic SLCs
|
||||
'''
|
||||
if not self.doDenseOffset:
|
||||
print('\ndense offset not requested, skip this and the remaining steps...')
|
||||
return
|
||||
if not ((self._insar.modeCombination == 0) or (self._insar.modeCombination == 1)):
|
||||
print('dense offset only support spotligh-spotlight and stripmap-stripmap pairs')
|
||||
return
|
||||
|
||||
catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
|
||||
self.updateParamemetersFromUser()
|
||||
referenceTrack = self._insar.loadTrack(reference=True)
|
||||
secondaryTrack = self._insar.loadTrack(reference=False)
|
||||
|
||||
denseOffsetDir = 'dense_offset'
|
||||
os.makedirs(denseOffsetDir, exist_ok=True)
|
||||
os.chdir(denseOffsetDir)
|
||||
|
||||
|
||||
##################################################
|
||||
# estimate reference and secondary frame offsets
|
||||
##################################################
|
||||
if len(referenceTrack.frames) > 1:
|
||||
matchingMode=1
|
||||
|
||||
#determine whether reference offset from matching is already done in previous InSAR processing.
|
||||
if hasattr(self, 'doInSAR'):
|
||||
if not self.doInSAR:
|
||||
referenceEstimated = False
|
||||
else:
|
||||
if self.frameOffsetMatching == False:
|
||||
referenceEstimated = False
|
||||
else:
|
||||
referenceEstimated = True
|
||||
else:
|
||||
if self.frameOffsetMatching == False:
|
||||
referenceEstimated = False
|
||||
else:
|
||||
referenceEstimated = True
|
||||
|
||||
#if reference offsets from matching are not already computed
|
||||
#if self.frameOffsetMatching == False:
|
||||
if referenceEstimated == False:
|
||||
offsetReference = frameOffset(referenceTrack, self._insar.referenceSlc, self._insar.referenceFrameOffset,
|
||||
crossCorrelation=True, matchingMode=matchingMode)
|
||||
offsetSecondary = frameOffset(secondaryTrack, self._insar.secondarySlc, self._insar.secondaryFrameOffset,
|
||||
crossCorrelation=True, matchingMode=matchingMode)
|
||||
#if self.frameOffsetMatching == False:
|
||||
if referenceEstimated == False:
|
||||
self._insar.frameRangeOffsetMatchingReference = offsetReference[2]
|
||||
self._insar.frameAzimuthOffsetMatchingReference = offsetReference[3]
|
||||
self._insar.frameRangeOffsetMatchingSecondary = offsetSecondary[2]
|
||||
self._insar.frameAzimuthOffsetMatchingSecondary = offsetSecondary[3]
|
||||
|
||||
|
||||
##################################################
|
||||
# mosaic slc
|
||||
##################################################
|
||||
numberOfFrames = len(referenceTrack.frames)
|
||||
if numberOfFrames == 1:
|
||||
import shutil
|
||||
#frameDir = os.path.join('f1_{}/mosaic'.format(self._insar.referenceFrames[0]))
|
||||
frameDir = os.path.join('f1_{}/s{}'.format(self._insar.referenceFrames[0], self._insar.startingSwath))
|
||||
if not os.path.isfile(self._insar.referenceSlc):
|
||||
if os.path.isfile(os.path.join('../', frameDir, self._insar.referenceSlc)):
|
||||
os.symlink(os.path.join('../', frameDir, self._insar.referenceSlc), self._insar.referenceSlc)
|
||||
#shutil.copy2() can overwrite
|
||||
shutil.copy2(os.path.join('../', frameDir, self._insar.referenceSlc+'.vrt'), self._insar.referenceSlc+'.vrt')
|
||||
shutil.copy2(os.path.join('../', frameDir, self._insar.referenceSlc+'.xml'), self._insar.referenceSlc+'.xml')
|
||||
if not os.path.isfile(self._insar.secondarySlc):
|
||||
if os.path.isfile(os.path.join('../', frameDir, self._insar.secondarySlc)):
|
||||
os.symlink(os.path.join('../', frameDir, self._insar.secondarySlc), self._insar.secondarySlc)
|
||||
shutil.copy2(os.path.join('../', frameDir, self._insar.secondarySlc+'.vrt'), self._insar.secondarySlc+'.vrt')
|
||||
shutil.copy2(os.path.join('../', frameDir, self._insar.secondarySlc+'.xml'), self._insar.secondarySlc+'.xml')
|
||||
|
||||
#update track parameters
|
||||
#########################################################
|
||||
#mosaic size
|
||||
referenceTrack.numberOfSamples = referenceTrack.frames[0].swaths[0].numberOfSamples
|
||||
referenceTrack.numberOfLines = referenceTrack.frames[0].swaths[0].numberOfLines
|
||||
#NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
|
||||
#range parameters
|
||||
referenceTrack.startingRange = referenceTrack.frames[0].swaths[0].startingRange
|
||||
referenceTrack.rangeSamplingRate = referenceTrack.frames[0].swaths[0].rangeSamplingRate
|
||||
referenceTrack.rangePixelSize = referenceTrack.frames[0].swaths[0].rangePixelSize
|
||||
#azimuth parameters
|
||||
referenceTrack.sensingStart = referenceTrack.frames[0].swaths[0].sensingStart
|
||||
referenceTrack.prf = referenceTrack.frames[0].swaths[0].prf
|
||||
referenceTrack.azimuthPixelSize = referenceTrack.frames[0].swaths[0].azimuthPixelSize
|
||||
referenceTrack.azimuthLineInterval = referenceTrack.frames[0].swaths[0].azimuthLineInterval
|
||||
|
||||
referenceTrack.dopplerVsPixel = referenceTrack.frames[0].swaths[0].dopplerVsPixel
|
||||
|
||||
#update track parameters, secondary
|
||||
#########################################################
|
||||
#mosaic size
|
||||
secondaryTrack.numberOfSamples = secondaryTrack.frames[0].swaths[0].numberOfSamples
|
||||
secondaryTrack.numberOfLines = secondaryTrack.frames[0].swaths[0].numberOfLines
|
||||
#NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
|
||||
#range parameters
|
||||
secondaryTrack.startingRange = secondaryTrack.frames[0].swaths[0].startingRange
|
||||
secondaryTrack.rangeSamplingRate = secondaryTrack.frames[0].swaths[0].rangeSamplingRate
|
||||
secondaryTrack.rangePixelSize = secondaryTrack.frames[0].swaths[0].rangePixelSize
|
||||
#azimuth parameters
|
||||
secondaryTrack.sensingStart = secondaryTrack.frames[0].swaths[0].sensingStart
|
||||
secondaryTrack.prf = secondaryTrack.frames[0].swaths[0].prf
|
||||
secondaryTrack.azimuthPixelSize = secondaryTrack.frames[0].swaths[0].azimuthPixelSize
|
||||
secondaryTrack.azimuthLineInterval = secondaryTrack.frames[0].swaths[0].azimuthLineInterval
|
||||
|
||||
secondaryTrack.dopplerVsPixel = secondaryTrack.frames[0].swaths[0].dopplerVsPixel
|
||||
|
||||
else:
|
||||
#in case InSAR, and therefore runSwathMosaic, was not done previously
|
||||
for i, frameNumber in enumerate(self._insar.referenceFrames):
|
||||
#update frame parameters
|
||||
#########################################################
|
||||
frame = referenceTrack.frames[i]
|
||||
#mosaic size
|
||||
frame.numberOfSamples = frame.swaths[0].numberOfSamples
|
||||
frame.numberOfLines = frame.swaths[0].numberOfLines
|
||||
#NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
|
||||
#range parameters
|
||||
frame.startingRange = frame.swaths[0].startingRange
|
||||
frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate
|
||||
frame.rangePixelSize = frame.swaths[0].rangePixelSize
|
||||
#azimuth parameters
|
||||
frame.sensingStart = frame.swaths[0].sensingStart
|
||||
frame.prf = frame.swaths[0].prf
|
||||
frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize
|
||||
frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval
|
||||
|
||||
#update frame parameters, secondary
|
||||
#########################################################
|
||||
frame = secondaryTrack.frames[i]
|
||||
#mosaic size
|
||||
frame.numberOfSamples = frame.swaths[0].numberOfSamples
|
||||
frame.numberOfLines = frame.swaths[0].numberOfLines
|
||||
#NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
|
||||
#range parameters
|
||||
frame.startingRange = frame.swaths[0].startingRange
|
||||
frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate
|
||||
frame.rangePixelSize = frame.swaths[0].rangePixelSize
|
||||
#azimuth parameters
|
||||
frame.sensingStart = frame.swaths[0].sensingStart
|
||||
frame.prf = frame.swaths[0].prf
|
||||
frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize
|
||||
frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval
|
||||
|
||||
|
||||
#mosaic reference slc
|
||||
#########################################################
|
||||
#choose offsets
|
||||
rangeOffsets = self._insar.frameRangeOffsetMatchingReference
|
||||
azimuthOffsets = self._insar.frameAzimuthOffsetMatchingReference
|
||||
|
||||
#list of input files
|
||||
slcs = []
|
||||
for i, frameNumber in enumerate(self._insar.referenceFrames):
|
||||
frameDir = 'f{}_{}'.format(i+1, frameNumber)
|
||||
swathDir = 's{}'.format(self._insar.startingSwath)
|
||||
slcs.append(os.path.join('../', frameDir, swathDir, self._insar.referenceSlc))
|
||||
|
||||
#note that track parameters are updated after mosaicking
|
||||
#parameters update is checked, it is OK.
|
||||
frameMosaic(referenceTrack, slcs, self._insar.referenceSlc,
|
||||
rangeOffsets, azimuthOffsets, 1, 1,
|
||||
updateTrack=True, phaseCompensation=True, resamplingMethod=2)
|
||||
create_xml(self._insar.referenceSlc, referenceTrack.numberOfSamples, referenceTrack.numberOfLines, 'slc')
|
||||
referenceTrack.dopplerVsPixel = computeTrackDoppler(referenceTrack)
|
||||
|
||||
#mosaic secondary slc
|
||||
#########################################################
|
||||
#choose offsets
|
||||
rangeOffsets = self._insar.frameRangeOffsetMatchingSecondary
|
||||
azimuthOffsets = self._insar.frameAzimuthOffsetMatchingSecondary
|
||||
|
||||
#list of input files
|
||||
slcs = []
|
||||
for i, frameNumber in enumerate(self._insar.referenceFrames):
|
||||
frameDir = 'f{}_{}'.format(i+1, frameNumber)
|
||||
swathDir = 's{}'.format(self._insar.startingSwath)
|
||||
slcs.append(os.path.join('../', frameDir, swathDir, self._insar.secondarySlc))
|
||||
|
||||
#note that track parameters are updated after mosaicking
|
||||
#parameters update is checked, it is OK.
|
||||
frameMosaic(secondaryTrack, slcs, self._insar.secondarySlc,
|
||||
rangeOffsets, azimuthOffsets, 1, 1,
|
||||
updateTrack=True, phaseCompensation=True, resamplingMethod=2)
|
||||
create_xml(self._insar.secondarySlc, secondaryTrack.numberOfSamples, secondaryTrack.numberOfLines, 'slc')
|
||||
secondaryTrack.dopplerVsPixel = computeTrackDoppler(secondaryTrack)
|
||||
|
||||
|
||||
#save parameter file inside denseoffset directory
|
||||
self._insar.saveProduct(referenceTrack, self._insar.referenceTrackParameter)
|
||||
self._insar.saveProduct(secondaryTrack, self._insar.secondaryTrackParameter)
|
||||
|
||||
|
||||
os.chdir('../')
|
||||
catalog.printToLog(logger, "runSlcMosaic")
|
||||
self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def computeTrackDoppler(track):
    '''
    Compute a Doppler-vs-pixel polynomial for a mosaicked track.

    Each frame carries its own Doppler polynomial defined against that
    frame's range-pixel grid. This evaluates every frame's polynomial on
    the common track range grid, averages across frames, and refits a
    cubic polynomial against the track pixel index.

    track: track object providing numberOfSamples, startingRange,
           rangePixelSize, and for each frame
           frames[i].swaths[0].{startingRange, rangePixelSize, dopplerVsPixel}

    Returns: [c0, c1, c2, c3] — polynomial coefficients in ascending order,
             i.e. Doppler(pixel) = c0 + c1*pixel + c2*pixel**2 + c3*pixel**3,
             matching the dopplerVsPixel convention used elsewhere.
    '''
    numberOfFrames = len(track.frames)

    #slant range of each track sample; loop-invariant, so compute it once
    #instead of once per frame
    rangeGrid = track.startingRange + np.arange(track.numberOfSamples) * track.rangePixelSize

    #accumulate per-frame Doppler evaluated on the common track range grid
    dop = np.zeros(track.numberOfSamples)
    for i in range(numberOfFrames):
        swath = track.frames[i].swaths[0]
        #convert track slant range to this frame's own pixel index
        index = (rangeGrid - swath.startingRange) / swath.rangePixelSize
        #dopplerVsPixel stores ascending-order coefficients; np.polyval
        #expects descending order, hence the reversal
        dop = dop + np.polyval(swath.dopplerVsPixel[::-1], index)

    #average across frames and refit a cubic against track pixel index
    index1 = np.arange(track.numberOfSamples)
    dop1 = dop / numberOfFrames
    p = np.polyfit(index1, dop1, 3)

    #np.polyfit returns descending-order coefficients; return ascending order
    return [p[3], p[2], p[1], p[0]]
|
||||
|
|
@ -0,0 +1,280 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import logging
|
||||
import datetime
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
import mroipac
|
||||
from mroipac.ampcor.Ampcor import Ampcor
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import topo
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import geo2rdr
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import waterBodyRadar
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import reformatGeometricalOffset
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import writeOffset
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsets
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import computeOffsetFromOrbit
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runSlcOffset')
|
||||
|
||||
def runSlcOffset(self):
    '''estimate SLC offsets

    For every frame/swath pair, matches the reference and secondary SLCs
    with ampcor to estimate range/azimuth offsets, then culls bad matches.
    Writes 'ampcor.off' (raw) and 'cull.off' (culled) inside each swath
    directory. If a water-body mask shows almost no land in a swath, falls
    back to purely geometric offsets for that swath.
    '''
    #this step is InSAR-only; skip entirely when InSAR is not requested
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            print('\nInSAR processing not requested, skip this and the remaining InSAR steps...')
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    demFile = os.path.abspath(self._insar.dem)
    wbdFile = os.path.abspath(self._insar.wbd)

    #directory layout: f<frameIndex>_<frameNumber>/s<swathNumber>/
    for i, frameNumber in enumerate(self._insar.referenceFrames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        os.chdir(frameDir)
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            swathDir = 's{}'.format(swathNumber)
            os.chdir(swathDir)

            print('estimating offset frame {}, swath {}'.format(frameNumber, swathNumber))

            referenceSwath = referenceTrack.frames[i].swaths[j]
            secondarySwath = secondaryTrack.frames[i].swaths[j]

            ##########################################
            #1. set number of matching points
            ##########################################
            #set initial numbers
            #NOTE(review): modeCombination 21/22 presumably get fewer range but
            #more azimuth locations because of the full-aperture matching below
            if (self._insar.modeCombination == 21) or (self._insar.modeCombination == 22):
                numberOfOffsetsRange = 10
                numberOfOffsetsAzimuth = 40
            else:
                numberOfOffsetsRange = 20
                numberOfOffsetsAzimuth = 20

            #change the initial numbers using water body
            if self.useWbdForNumberOffsets and (self._insar.wbd != None):
                numberRangeLooks=100
                numberAzimuthLooks=100
                #compute land ratio using topo module
                topo(referenceSwath, referenceTrack, demFile, 'lat.rdr', 'lon.rdr', 'hgt.rdr', losFile='los.rdr',
                    incFile=None, mskFile=None,
                    numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False)
                waterBodyRadar('lat.rdr', 'lon.rdr', wbdFile, 'wbd.rdr')

                wbdImg = isceobj.createImage()
                wbdImg.load('wbd.rdr.xml')
                width = wbdImg.width
                length = wbdImg.length

                #landRatio counts zero-valued mask samples as land
                wbd = np.fromfile('wbd.rdr', dtype=np.byte).reshape(length, width)
                landRatio = np.sum(wbd==0) / (length*width)

                if (landRatio <= 0.00125):
                    print('\n\nWARNING: land too small for estimating slc offsets at frame {}, swath {}'.format(frameNumber, swathNumber))
                    print('proceed to use geometric offsets for forming interferogram')
                    print('but please consider not using this swath\n\n')
                    catalog.addItem('warning message', 'land too small for estimating slc offsets at frame {}, swath {}, use geometric offsets'.format(frameNumber, swathNumber), 'runSlcOffset')

                    #compute geometrical offsets and write them directly to
                    #'cull.off' in place of matched+culled offsets
                    geo2rdr(secondarySwath, secondaryTrack, 'lat.rdr', 'lon.rdr', 'hgt.rdr', 'rg.rdr', 'az.rdr', numberRangeLooks=numberRangeLooks, numberAzimuthLooks=numberAzimuthLooks, multilookTimeOffset=False)
                    reformatGeometricalOffset('rg.rdr', 'az.rdr', 'cull.off', rangeStep=numberRangeLooks, azimuthStep=numberAzimuthLooks, maximumNumberOfOffsets=2000)

                    #clean up intermediate geometry products
                    os.remove('lat.rdr')
                    os.remove('lat.rdr.vrt')
                    os.remove('lat.rdr.xml')
                    os.remove('lon.rdr')
                    os.remove('lon.rdr.vrt')
                    os.remove('lon.rdr.xml')
                    os.remove('hgt.rdr')
                    os.remove('hgt.rdr.vrt')
                    os.remove('hgt.rdr.xml')
                    os.remove('los.rdr')
                    os.remove('los.rdr.vrt')
                    os.remove('los.rdr.xml')
                    os.remove('wbd.rdr')
                    os.remove('wbd.rdr.vrt')
                    os.remove('wbd.rdr.xml')

                    os.remove('rg.rdr')
                    os.remove('rg.rdr.vrt')
                    os.remove('rg.rdr.xml')
                    os.remove('az.rdr')
                    os.remove('az.rdr.vrt')
                    os.remove('az.rdr.xml')

                    #skip ampcor matching for this swath
                    os.chdir('../')
                    continue

            
                #clean up intermediate geometry products (matching path)
                os.remove('lat.rdr')
                os.remove('lat.rdr.vrt')
                os.remove('lat.rdr.xml')
                os.remove('lon.rdr')
                os.remove('lon.rdr.vrt')
                os.remove('lon.rdr.xml')
                os.remove('hgt.rdr')
                os.remove('hgt.rdr.vrt')
                os.remove('hgt.rdr.xml')
                os.remove('los.rdr')
                os.remove('los.rdr.vrt')
                os.remove('los.rdr.xml')
                os.remove('wbd.rdr')
                os.remove('wbd.rdr.vrt')
                os.remove('wbd.rdr.xml')

                #put the results on a grid with a specified interval
                interval = 0.2
                axisRatio = int(np.sqrt(landRatio)/interval)*interval + interval
                if axisRatio > 1:
                    axisRatio = 1

                #scale up the number of matching locations to compensate for
                #the water-covered fraction of the swath
                numberOfOffsetsRange = int(numberOfOffsetsRange/axisRatio)
                numberOfOffsetsAzimuth = int(numberOfOffsetsAzimuth/axisRatio)
            else:
                catalog.addItem('warning message', 'no water mask used to determine number of matching points. frame {} swath {}'.format(frameNumber, swathNumber), 'runSlcOffset')

            #user's settings override the computed numbers
            if self.numberRangeOffsets != None:
                numberOfOffsetsRange = self.numberRangeOffsets[i][j]
            if self.numberAzimuthOffsets != None:
                numberOfOffsetsAzimuth = self.numberAzimuthOffsets[i][j]

            catalog.addItem('number of offsets range frame {} swath {}'.format(frameNumber, swathNumber), numberOfOffsetsRange, 'runSlcOffset')
            catalog.addItem('number of offsets azimuth frame {} swath {}'.format(frameNumber, swathNumber), numberOfOffsetsAzimuth, 'runSlcOffset')

            ##########################################
            #2. match using ampcor
            ##########################################
            ampcor = Ampcor(name='insarapp_slcs_ampcor')
            ampcor.configure()

            mSLC = isceobj.createSlcImage()
            mSLC.load(self._insar.referenceSlc+'.xml')
            mSLC.setAccessMode('read')
            mSLC.createImage()

            sSLC = isceobj.createSlcImage()
            sSLC.load(self._insar.secondarySlc+'.xml')
            sSLC.setAccessMode('read')
            sSLC.createImage()

            ampcor.setImageDataType1('complex')
            ampcor.setImageDataType2('complex')

            ampcor.setReferenceSlcImage(mSLC)
            ampcor.setSecondarySlcImage(sSLC)

            #MATCH REGION
            #compute an offset at image center to use
            rgoff, azoff = computeOffsetFromOrbit(referenceSwath, referenceTrack, secondarySwath, secondaryTrack,
                referenceSwath.numberOfSamples * 0.5,
                referenceSwath.numberOfLines * 0.5)
            #it seems that we cannot use 0, haven't look into the problem
            if rgoff == 0:
                rgoff = 1
            if azoff == 0:
                azoff = 1
            #shift the first matching sample/line so the search window stays
            #inside the image when the gross offset is negative
            firstSample = 1
            if rgoff < 0:
                firstSample = int(35 - rgoff)
            firstLine = 1
            if azoff < 0:
                firstLine = int(35 - azoff)
            ampcor.setAcrossGrossOffset(rgoff)
            ampcor.setDownGrossOffset(azoff)
            ampcor.setFirstSampleAcross(firstSample)
            ampcor.setLastSampleAcross(mSLC.width)
            ampcor.setNumberLocationAcross(numberOfOffsetsRange)
            ampcor.setFirstSampleDown(firstLine)
            ampcor.setLastSampleDown(mSLC.length)
            ampcor.setNumberLocationDown(numberOfOffsetsAzimuth)

            #MATCH PARAMETERS
            #full-aperture mode
            if (self._insar.modeCombination == 21) or \
               (self._insar.modeCombination == 22) or \
               (self._insar.modeCombination == 31) or \
               (self._insar.modeCombination == 32):
                ampcor.setWindowSizeWidth(64)
                ampcor.setWindowSizeHeight(512)
                #note this is the half width/length of search area, number of resulting correlation samples: 32*2+1
                ampcor.setSearchWindowSizeWidth(32)
                ampcor.setSearchWindowSizeHeight(32)
                #triggering full-aperture mode matching
                ampcor.setWinsizeFilt(8)
                ampcor.setOversamplingFactorFilt(64)
            #regular mode
            else:
                ampcor.setWindowSizeWidth(64)
                ampcor.setWindowSizeHeight(64)
                ampcor.setSearchWindowSizeWidth(32)
                ampcor.setSearchWindowSizeHeight(32)

            #REST OF THE STUFF
            ampcor.setAcrossLooks(1)
            ampcor.setDownLooks(1)
            ampcor.setOversamplingFactor(64)
            ampcor.setZoomWindowSize(16)
            #1. The following not set
            #Matching Scale for Sample/Line Directions (-) = 1. 1.
            #should add the following in Ampcor.py?
            #if not set, in this case, Ampcor.py's value is also 1. 1.
            #ampcor.setScaleFactorX(1.)
            #ampcor.setScaleFactorY(1.)

            #MATCH THRESHOLDS AND DEBUG DATA
            #2. The following not set
            #in roi_pac the value is set to 0 1
            #in isce the value is set to 0.001 1000.0
            #SNR and Covariance Thresholds (-) = {s1} {s2}
            #should add the following in Ampcor?
            #THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC
            #ampcor.setThresholdSNR(0)
            #ampcor.setThresholdCov(1)
            ampcor.setDebugFlag(False)
            ampcor.setDisplayFlag(False)

            #in summary, only two things not set which are indicated by 'The following not set' above.

            #run ampcor
            ampcor.ampcor()
            offsets = ampcor.getOffsetField()
            ampcorOffsetFile = 'ampcor.off'
            writeOffset(offsets, ampcorOffsetFile)

            #finalize image, and re-create it
            #otherwise the file pointer is still at the end of the image
            mSLC.finalizeImage()
            sSLC.finalizeImage()

            ##########################################
            #3. cull offsets
            ##########################################
            refinedOffsets = cullOffsets(offsets)
            if refinedOffsets == None:
                print('******************************************************************')
                print('WARNING: There are not enough offsets left, so we are forced to')
                print('         use offset without culling. frame {}, swath {}'.format(frameNumber, swathNumber))
                print('******************************************************************')
                catalog.addItem('warning message', 'not enough offsets left, use offset without culling. frame {} swath {}'.format(frameNumber, swathNumber), 'runSlcOffset')
                #fall back to the raw, unculled offset field
                refinedOffsets = offsets

            cullOffsetFile = 'cull.off'
            writeOffset(refinedOffsets, cullOffsetFile)

            #leave swath directory
            os.chdir('../')
        #leave frame directory
        os.chdir('../')

    catalog.printToLog(logger, "runSlcOffset")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
|
@ -0,0 +1,685 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import logging
|
||||
import datetime
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runSwathMosaic')
|
||||
|
||||
def runSwathMosaic(self):
    '''mosaic subswaths

    For each frame, mosaics the per-swath interferograms and amplitudes into
    frame-level products under f<i>_<frame>/mosaic/, and updates the frame
    parameters of both reference and secondary tracks accordingly. When there
    is effectively a single swath (or a non-ScanSAR mode combination), the
    single swath's products are linked/copied instead of mosaicked.
    '''
    #this step is InSAR-only; skip when InSAR is not requested
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    for i, frameNumber in enumerate(self._insar.referenceFrames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        os.chdir(frameDir)

        mosaicDir = 'mosaic'
        os.makedirs(mosaicDir, exist_ok=True)
        os.chdir(mosaicDir)

        #mosaicking only applies to ScanSAR-involved mode combinations
        #(21/22/31/32) with more than one swath; otherwise just reuse the
        #single swath's products
        if not (
            ((self._insar.modeCombination == 21) or \
            (self._insar.modeCombination == 22) or \
            (self._insar.modeCombination == 31) or \
            (self._insar.modeCombination == 32))
            and
            (self._insar.endingSwath-self._insar.startingSwath+1 > 1)
            ):
            import shutil
            swathDir = 's{}'.format(referenceTrack.frames[i].swaths[0].swathNumber)

            #link data files; copy metadata (shutil.copy2() can overwrite)
            if not os.path.isfile(self._insar.interferogram):
                os.symlink(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram)
            shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
            shutil.copy2(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
            if not os.path.isfile(self._insar.amplitude):
                os.symlink(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude)
            shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
            shutil.copy2(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')

            # os.rename(os.path.join('../', swathDir, self._insar.interferogram), self._insar.interferogram)
            # os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.vrt'), self._insar.interferogram+'.vrt')
            # os.rename(os.path.join('../', swathDir, self._insar.interferogram+'.xml'), self._insar.interferogram+'.xml')
            # os.rename(os.path.join('../', swathDir, self._insar.amplitude), self._insar.amplitude)
            # os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.vrt'), self._insar.amplitude+'.vrt')
            # os.rename(os.path.join('../', swathDir, self._insar.amplitude+'.xml'), self._insar.amplitude+'.xml')

            #update frame parameters
            #########################################################
            frame = referenceTrack.frames[i]
            infImg = isceobj.createImage()
            infImg.load(self._insar.interferogram+'.xml')
            #mosaic size
            frame.numberOfSamples = infImg.width
            frame.numberOfLines = infImg.length
            #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
            #range parameters
            frame.startingRange = frame.swaths[0].startingRange
            frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate
            frame.rangePixelSize = frame.swaths[0].rangePixelSize
            #azimuth parameters
            frame.sensingStart = frame.swaths[0].sensingStart
            frame.prf = frame.swaths[0].prf
            frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize
            frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval

            #update frame parameters, secondary
            #########################################################
            frame = secondaryTrack.frames[i]
            #mosaic size (secondary size accounts for number-of-looks taken)
            frame.numberOfSamples = int(frame.swaths[0].numberOfSamples/self._insar.numberRangeLooks1)
            frame.numberOfLines = int(frame.swaths[0].numberOfLines/self._insar.numberAzimuthLooks1)
            #NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
            #range parameters
            frame.startingRange = frame.swaths[0].startingRange
            frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate
            frame.rangePixelSize = frame.swaths[0].rangePixelSize
            #azimuth parameters
            frame.sensingStart = frame.swaths[0].sensingStart
            frame.prf = frame.swaths[0].prf
            frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize
            frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval

            os.chdir('../')

            #save parameter file
            self._insar.saveProduct(referenceTrack.frames[i], self._insar.referenceFrameParameter)
            self._insar.saveProduct(secondaryTrack.frames[i], self._insar.secondaryFrameParameter)

            os.chdir('../')

            continue

        #choose offsets
        numberOfFrames = len(referenceTrack.frames)
        numberOfSwaths = len(referenceTrack.frames[i].swaths)
        if self.swathOffsetMatching:
            #no need to do this as the API support 2-d list
            #rangeOffsets = (np.array(self._insar.swathRangeOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths)
            #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetMatchingReference)).reshape(numberOfFrames, numberOfSwaths)
            rangeOffsets = self._insar.swathRangeOffsetMatchingReference
            azimuthOffsets = self._insar.swathAzimuthOffsetMatchingReference

        else:
            #rangeOffsets = (np.array(self._insar.swathRangeOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths)
            #azimuthOffsets = (np.array(self._insar.swathAzimuthOffsetGeometricalReference)).reshape(numberOfFrames, numberOfSwaths)
            rangeOffsets = self._insar.swathRangeOffsetGeometricalReference
            azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalReference

        #select this frame's row of the 2-d offset list
        rangeOffsets = rangeOffsets[i]
        azimuthOffsets = azimuthOffsets[i]

        #list of input files
        inputInterferograms = []
        inputAmplitudes = []
        for j, swathNumber in enumerate(range(self._insar.startingSwath, self._insar.endingSwath + 1)):
            swathDir = 's{}'.format(swathNumber)
            inputInterferograms.append(os.path.join('../', swathDir, self._insar.interferogram))
            inputAmplitudes.append(os.path.join('../', swathDir, self._insar.amplitude))

        #note that frame parameters are updated after mosaicking
        #mosaic amplitudes
        swathMosaic(referenceTrack.frames[i], inputAmplitudes, self._insar.amplitude,
            rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, resamplingMethod=0)
        #mosaic interferograms (updateFrame=True so frame parameters follow the mosaic)
        swathMosaic(referenceTrack.frames[i], inputInterferograms, self._insar.interferogram,
            rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1, updateFrame=True, resamplingMethod=1)

        create_xml(self._insar.amplitude, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'amp')
        create_xml(self._insar.interferogram, referenceTrack.frames[i].numberOfSamples, referenceTrack.frames[i].numberOfLines, 'int')

        #update secondary frame parameters here
        #no matching for secondary, always use geometry
        rangeOffsets = self._insar.swathRangeOffsetGeometricalSecondary
        azimuthOffsets = self._insar.swathAzimuthOffsetGeometricalSecondary
        rangeOffsets = rangeOffsets[i]
        azimuthOffsets = azimuthOffsets[i]
        swathMosaicParameters(secondaryTrack.frames[i], rangeOffsets, azimuthOffsets, self._insar.numberRangeLooks1, self._insar.numberAzimuthLooks1)

        #leave mosaic directory
        os.chdir('../')

        #save parameter file
        self._insar.saveProduct(referenceTrack.frames[i], self._insar.referenceFrameParameter)
        self._insar.saveProduct(secondaryTrack.frames[i], self._insar.secondaryFrameParameter)

        #leave frame directory
        os.chdir('../')

    catalog.printToLog(logger, "runSwathMosaic")
    self._insar.procDoc.addAllFromCatalog(catalog)
||||
|
||||
|
||||
def swathMosaic(frame, inputFiles, outputfile, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks, updateFrame=False, phaseCompensation=False, phaseDiff=None, phaseDiffFixed=None, snapThreshold=None, snapSwath=None, pcRangeLooks=1, pcAzimuthLooks=4, filt=False, resamplingMethod=0):
|
||||
'''
|
||||
mosaic swaths
|
||||
|
||||
#PART 1. REGULAR INPUT PARAMTERS
|
||||
frame: frame
|
||||
inputFiles: input file list
|
||||
outputfile: output mosaic file
|
||||
rangeOffsets: range offsets
|
||||
azimuthOffsets: azimuth offsets
|
||||
numberOfRangeLooks: number of range looks of the input files
|
||||
numberOfAzimuthLooks: number of azimuth looks of the input files
|
||||
updateFrame: whether update frame parameters
|
||||
|
||||
#PART 2. PARAMETERS FOR COMPUTING PHASE DIFFERENCE BETWEEN SUBSWATHS
|
||||
phaseCompensation: whether do phase compensation for each swath
|
||||
phaseDiff: pre-computed compensation phase for each swath
|
||||
phaseDiffFixed: if provided, the estimated value will snap to one of these values, which is nearest to the estimated one.
|
||||
snapThreshold: this is used with phaseDiffFixed
|
||||
snapSwath: indicate whether snap to fixed values for each swath phase diff, must be specified if phaseDiffFixed!=None
|
||||
pcRangeLooks: number of range looks to take when compute swath phase difference
|
||||
pcAzimuthLooks: number of azimuth looks to take when compute swath phase difference
|
||||
filt: whether do filtering when compute swath phase difference
|
||||
|
||||
#PART 3. RESAMPLING METHOD
|
||||
resamplingMethod: 0: amp resampling. 1: int resampling.
|
||||
'''
|
||||
from contrib.alos2proc_f.alos2proc_f import rect_with_looks
|
||||
from contrib.alos2proc.alos2proc import mosaicsubswath
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import multilook
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import cal_coherence_1
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import filterInterferogram
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import computePhaseDiff
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import snap
|
||||
|
||||
numberOfSwaths = len(frame.swaths)
|
||||
swaths = frame.swaths
|
||||
|
||||
rangeScale = []
|
||||
azimuthScale = []
|
||||
rectWidth = []
|
||||
rectLength = []
|
||||
for i in range(numberOfSwaths):
|
||||
rangeScale.append(swaths[0].rangePixelSize / swaths[i].rangePixelSize)
|
||||
azimuthScale.append(swaths[0].azimuthLineInterval / swaths[i].azimuthLineInterval)
|
||||
if i == 0:
|
||||
rectWidth.append( int(swaths[i].numberOfSamples / numberOfRangeLooks) )
|
||||
rectLength.append( int(swaths[i].numberOfLines / numberOfAzimuthLooks) )
|
||||
else:
|
||||
rectWidth.append( round(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) )
|
||||
rectLength.append( round(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) )
|
||||
#rectWidth.append( int(1.0 / rangeScale[i] * int(swaths[i].numberOfSamples / numberOfRangeLooks)) )
|
||||
#rectLength.append( int(1.0 / azimuthScale[i] * int(swaths[i].numberOfLines / numberOfAzimuthLooks)) )
|
||||
|
||||
#convert original offset to offset for images with looks
|
||||
#use list instead of np.array to make it consistent with the rest of the code
|
||||
rangeOffsets1 = [i/numberOfRangeLooks for i in rangeOffsets]
|
||||
azimuthOffsets1 = [i/numberOfAzimuthLooks for i in azimuthOffsets]
|
||||
|
||||
#get offset relative to the first frame
|
||||
rangeOffsets2 = [0.0]
|
||||
azimuthOffsets2 = [0.0]
|
||||
for i in range(1, numberOfSwaths):
|
||||
rangeOffsets2.append(0.0)
|
||||
azimuthOffsets2.append(0.0)
|
||||
for j in range(1, i+1):
|
||||
rangeOffsets2[i] += rangeOffsets1[j]
|
||||
azimuthOffsets2[i] += azimuthOffsets1[j]
|
||||
|
||||
#resample each swath
|
||||
rinfs = []
|
||||
for i, inf in enumerate(inputFiles):
|
||||
rinfs.append("{}_{}{}".format(os.path.splitext(os.path.basename(inf))[0], i, os.path.splitext(os.path.basename(inf))[1]))
|
||||
#do not resample first swath
|
||||
if i == 0:
|
||||
if os.path.isfile(rinfs[i]):
|
||||
os.remove(rinfs[i])
|
||||
os.symlink(inf, rinfs[i])
|
||||
else:
|
||||
#no need to resample
|
||||
if (abs(rangeOffsets2[i] - round(rangeOffsets2[i])) < 0.0001) and (abs(azimuthOffsets2[i] - round(azimuthOffsets2[i])) < 0.0001):
|
||||
if os.path.isfile(rinfs[i]):
|
||||
os.remove(rinfs[i])
|
||||
os.symlink(inf, rinfs[i])
|
||||
#all of the following use of rangeOffsets2/azimuthOffsets2 is inside int(), we do the following in case it is like
|
||||
#4.99999999999...
|
||||
rangeOffsets2[i] = round(rangeOffsets2[i])
|
||||
azimuthOffsets2[i] = round(azimuthOffsets2[i])
|
||||
else:
|
||||
infImg = isceobj.createImage()
|
||||
infImg.load(inf+'.xml')
|
||||
rangeOffsets2Frac = rangeOffsets2[i] - int(rangeOffsets2[i])
|
||||
azimuthOffsets2Frac = azimuthOffsets2[i] - int(azimuthOffsets2[i])
|
||||
|
||||
if resamplingMethod == 0:
|
||||
rect_with_looks(inf,
|
||||
rinfs[i],
|
||||
infImg.width, infImg.length,
|
||||
rectWidth[i], rectLength[i],
|
||||
rangeScale[i], 0.0,
|
||||
0.0,azimuthScale[i],
|
||||
rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i],
|
||||
1,1,
|
||||
1,1,
|
||||
'COMPLEX',
|
||||
'Bilinear')
|
||||
elif resamplingMethod == 1:
|
||||
#decompose amplitude and phase
|
||||
phaseFile = 'phase'
|
||||
amplitudeFile = 'amplitude'
|
||||
data = np.fromfile(inf, dtype=np.complex64).reshape(infImg.length, infImg.width)
|
||||
phase = np.exp(np.complex64(1j) * np.angle(data))
|
||||
phase[np.nonzero(data==0)] = 0
|
||||
phase.astype(np.complex64).tofile(phaseFile)
|
||||
amplitude = np.absolute(data)
|
||||
amplitude.astype(np.float32).tofile(amplitudeFile)
|
||||
|
||||
#resampling
|
||||
phaseRectFile = 'phaseRect'
|
||||
amplitudeRectFile = 'amplitudeRect'
|
||||
rect_with_looks(phaseFile,
|
||||
phaseRectFile,
|
||||
infImg.width, infImg.length,
|
||||
rectWidth[i], rectLength[i],
|
||||
rangeScale[i], 0.0,
|
||||
0.0,azimuthScale[i],
|
||||
rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i],
|
||||
1,1,
|
||||
1,1,
|
||||
'COMPLEX',
|
||||
'Sinc')
|
||||
rect_with_looks(amplitudeFile,
|
||||
amplitudeRectFile,
|
||||
infImg.width, infImg.length,
|
||||
rectWidth[i], rectLength[i],
|
||||
rangeScale[i], 0.0,
|
||||
0.0,azimuthScale[i],
|
||||
rangeOffsets2Frac * rangeScale[i], azimuthOffsets2Frac * azimuthScale[i],
|
||||
1,1,
|
||||
1,1,
|
||||
'REAL',
|
||||
'Bilinear')
|
||||
|
||||
#recombine amplitude and phase
|
||||
phase = np.fromfile(phaseRectFile, dtype=np.complex64).reshape(rectLength[i], rectWidth[i])
|
||||
amplitude = np.fromfile(amplitudeRectFile, dtype=np.float32).reshape(rectLength[i], rectWidth[i])
|
||||
(phase*amplitude).astype(np.complex64).tofile(rinfs[i])
|
||||
|
||||
#tidy up
|
||||
os.remove(phaseFile)
|
||||
os.remove(amplitudeFile)
|
||||
os.remove(phaseRectFile)
|
||||
os.remove(amplitudeRectFile)
|
||||
|
||||
|
||||
#determine output width and length
|
||||
#actually no need to calculate in range direction
|
||||
xs = []
|
||||
xe = []
|
||||
ys = []
|
||||
ye = []
|
||||
for i in range(numberOfSwaths):
|
||||
if i == 0:
|
||||
xs.append(0)
|
||||
xe.append(rectWidth[i] - 1)
|
||||
ys.append(0)
|
||||
ye.append(rectLength[i] - 1)
|
||||
else:
|
||||
xs.append(0 - int(rangeOffsets2[i]))
|
||||
xe.append(rectWidth[i] - 1 - int(rangeOffsets2[i]))
|
||||
ys.append(0 - int(azimuthOffsets2[i]))
|
||||
ye.append(rectLength[i] - 1 - int(azimuthOffsets2[i]))
|
||||
|
||||
(xmin, xminIndex) = min((v,i) for i,v in enumerate(xs))
|
||||
(xmax, xmaxIndex) = max((v,i) for i,v in enumerate(xe))
|
||||
(ymin, yminIndex) = min((v,i) for i,v in enumerate(ys))
|
||||
(ymax, ymaxIndex) = max((v,i) for i,v in enumerate(ye))
|
||||
|
||||
outWidth = xmax - xmin + 1
|
||||
outLength = ymax - ymin + 1
|
||||
|
||||
#prepare offset for mosaicing
|
||||
rangeOffsets3 = []
|
||||
azimuthOffsets3 = []
|
||||
for i in range(numberOfSwaths):
|
||||
azimuthOffsets3.append(int(azimuthOffsets2[i]) - int(azimuthOffsets2[yminIndex]))
|
||||
if i != 0:
|
||||
rangeOffsets3.append(int(rangeOffsets2[i]) - int(rangeOffsets2[i-1]))
|
||||
else:
|
||||
rangeOffsets3.append(0)
|
||||
|
||||
|
||||
delta = int(30 / numberOfRangeLooks)
|
||||
|
||||
#compute compensation phase for each swath
|
||||
diffMean2 = [0.0 for i in range(numberOfSwaths)]
|
||||
phaseDiffEst = [None for i in range(numberOfSwaths)]
|
||||
#True if:
|
||||
# (1) used diff phase from input
|
||||
# (2) used estimated diff phase after snapping to a fixed diff phase provided
|
||||
#False if:
|
||||
# (1) used purely estimated diff phase
|
||||
phaseDiffSource = ['estimated' for i in range(numberOfSwaths)]
|
||||
# 1. 'estimated': estimated from subswath overlap
|
||||
# 2. 'estimated+snap': estimated from subswath overlap and snap to a fixed value
|
||||
# 3. 'input': pre-computed
|
||||
# confidence level: 3 > 2 > 1
|
||||
numberOfValidSamples = [None for i in range(numberOfSwaths)]
|
||||
# only record when (filt == False) and (index[0].size >= 4000)
|
||||
if phaseCompensation:
|
||||
#compute swath phase offset
|
||||
diffMean = [0.0]
|
||||
for i in range(1, numberOfSwaths):
|
||||
|
||||
#no need to estimate diff phase if provided from input
|
||||
#####################################################################
|
||||
if phaseDiff!=None:
|
||||
if phaseDiff[i]!=None:
|
||||
diffMean.append(phaseDiff[i])
|
||||
phaseDiffSource[i] = 'input'
|
||||
print('using pre-computed phase offset given from input')
|
||||
print('phase offset: subswath{} - subswath{}: {}'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, phaseDiff[i]))
|
||||
continue
|
||||
#####################################################################
|
||||
|
||||
#all indexes start with zero, all the computed start/end sample/line indexes are included.
|
||||
|
||||
#no need to add edge here, as we are going to find first/last nonzero sample/lines later
|
||||
#edge = delta
|
||||
edge = 0
|
||||
|
||||
#image i-1
|
||||
startSample1 = edge + 0 - int(rangeOffsets2[i]) + int(rangeOffsets2[i-1])
|
||||
endSample1 = -edge + rectWidth[i-1]-1
|
||||
startLine1 = edge + max(0 - int(azimuthOffsets2[i]) + int(azimuthOffsets2[i-1]), 0)
|
||||
endLine1 = -edge + min(rectLength[i]-1 - int(azimuthOffsets2[i]) + int(azimuthOffsets2[i-1]), rectLength[i-1]-1)
|
||||
data1 = readImage(rinfs[i-1], rectWidth[i-1], rectLength[i-1], startSample1, endSample1, startLine1, endLine1)
|
||||
|
||||
#image i
|
||||
startSample2 = edge + 0
|
||||
endSample2 = -edge + rectWidth[i-1]-1 - int(rangeOffsets2[i-1]) + int(rangeOffsets2[i])
|
||||
startLine2 = edge + max(0 - int(azimuthOffsets2[i-1]) + int(azimuthOffsets2[i]), 0)
|
||||
endLine2 = -edge + min(rectLength[i-1]-1 - int(azimuthOffsets2[i-1]) + int(azimuthOffsets2[i]), rectLength[i]-1)
|
||||
data2 = readImage(rinfs[i], rectWidth[i], rectLength[i], startSample2, endSample2, startLine2, endLine2)
|
||||
|
||||
#remove edge due to incomplete covolution in resampling
|
||||
edge = 9
|
||||
(startLine0, endLine0, startSample0, endSample0) = findNonzero( np.logical_and((data1!=0), (data2!=0)) )
|
||||
data1 = data1[startLine0+edge:endLine0+1-edge, startSample0+edge:endSample0+1-edge]
|
||||
data2 = data2[startLine0+edge:endLine0+1-edge, startSample0+edge:endSample0+1-edge]
|
||||
|
||||
#take looks
|
||||
data1 = multilook(data1, pcAzimuthLooks, pcRangeLooks)
|
||||
data2 = multilook(data2, pcAzimuthLooks, pcRangeLooks)
|
||||
|
||||
#filter
|
||||
if filt:
|
||||
data1 /= (np.absolute(data1)+(data1==0))
|
||||
data2 /= (np.absolute(data2)+(data2==0))
|
||||
data1 = filterInterferogram(data1, 3.0, 64, 1)
|
||||
data2 = filterInterferogram(data2, 3.0, 64, 1)
|
||||
|
||||
|
||||
#get difference
|
||||
dataDiff = data1 * np.conj(data2)
|
||||
cor = cal_coherence_1(dataDiff, win=5)
|
||||
index = np.nonzero(np.logical_and(cor>0.85, dataDiff!=0))
|
||||
|
||||
DEBUG=False
|
||||
if DEBUG:
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
(length7, width7)=dataDiff.shape
|
||||
filename = 'diff_ori_s{}-s{}.int'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber)
|
||||
dataDiff.astype(np.complex64).tofile(filename)
|
||||
create_xml(filename, width7, length7, 'int')
|
||||
filename = 'cor_ori_s{}-s{}.cor'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber)
|
||||
cor.astype(np.float32).tofile(filename)
|
||||
create_xml(filename, width7, length7, 'float')
|
||||
|
||||
print('\ncompute phase difference between subswaths {} and {}'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber))
|
||||
print('number of pixels with coherence > 0.85: {}'.format(index[0].size))
|
||||
|
||||
#if already filtered the subswath overlap interferograms (MAI), do not filtered differential interferograms
|
||||
if (filt == False) and (index[0].size < 4000):
|
||||
#coherence too low, filter subswath overlap differential interferogram
|
||||
diffMean0 = 0.0
|
||||
breakFlag = False
|
||||
for (filterStrength, filterWinSize) in zip([3.0, 9.0], [64, 128]):
|
||||
dataDiff = data1 * np.conj(data2)
|
||||
dataDiff /= (np.absolute(dataDiff)+(dataDiff==0))
|
||||
dataDiff = filterInterferogram(dataDiff, filterStrength, filterWinSize, 1)
|
||||
cor = cal_coherence_1(dataDiff, win=7)
|
||||
|
||||
DEBUG=False
|
||||
if DEBUG:
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
(length7, width7)=dataDiff.shape
|
||||
filename = 'diff_filt_s{}-s{}_strength_{}_winsize_{}.int'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, filterStrength, filterWinSize)
|
||||
dataDiff.astype(np.complex64).tofile(filename)
|
||||
create_xml(filename, width7, length7, 'int')
|
||||
filename = 'cor_filt_s{}-s{}_strength_{}_winsize_{}.cor'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, filterStrength, filterWinSize)
|
||||
cor.astype(np.float32).tofile(filename)
|
||||
create_xml(filename, width7, length7, 'float')
|
||||
|
||||
for corth in [0.99999, 0.9999]:
|
||||
index = np.nonzero(np.logical_and(cor>corth, dataDiff!=0))
|
||||
if index[0].size > 30000:
|
||||
breakFlag = True
|
||||
break
|
||||
if breakFlag:
|
||||
break
|
||||
|
||||
if index[0].size < 100:
|
||||
diffMean0 = 0.0
|
||||
print('\n\nWARNING: too few high coherence pixels for swath phase difference estimation')
|
||||
print(' number of high coherence pixels: {}\n\n'.format(index[0].size))
|
||||
else:
|
||||
print('filtered coherence threshold used: {}, number of pixels used: {}'.format(corth, index[0].size))
|
||||
angle = np.mean(np.angle(dataDiff[index]), dtype=np.float64)
|
||||
diffMean0 += angle
|
||||
data2 *= np.exp(np.complex64(1j) * angle)
|
||||
print('phase offset: %15.12f rad with filter strength: %f, window size: %3d'%(diffMean0, filterStrength, filterWinSize))
|
||||
else:
|
||||
if filt:
|
||||
(diffMean0, numberOfValidSamples[i]) = computePhaseDiff(data1, data2, coherenceWindowSize=5, coherenceThreshold=0.95)
|
||||
else:
|
||||
(diffMean0, numberOfValidSamples[i]) = computePhaseDiff(data1, data2, coherenceWindowSize=5, coherenceThreshold=0.85)
|
||||
if numberOfValidSamples[i] < 100:
|
||||
diffMean0 = 0.0
|
||||
print('\n\nWARNING: too few high coherence pixels for swath phase difference estimation')
|
||||
print(' number of high coherence pixels: {}\n\n'.format(numberOfValidSamples[i]))
|
||||
|
||||
#do not record when filt
|
||||
if filt:
|
||||
numberOfValidSamples[i] = None
|
||||
|
||||
|
||||
#save purely estimated diff phase
|
||||
phaseDiffEst[i] = diffMean0
|
||||
|
||||
#if fixed diff phase provided and the estimated diff phase is close enough to a fixed value, snap to it
|
||||
if phaseDiffFixed != None:
|
||||
if snapSwath[i-1] == True:
|
||||
(outputValue, snapped) = snap(diffMean0, phaseDiffFixed, snapThreshold)
|
||||
if snapped == True:
|
||||
diffMean0 = outputValue
|
||||
phaseDiffSource[i] = 'estimated+snap'
|
||||
|
||||
diffMean.append(diffMean0)
|
||||
print('phase offset: subswath{} - subswath{}: {}'.format(frame.swaths[i-1].swathNumber, frame.swaths[i].swathNumber, diffMean0))
|
||||
|
||||
for i in range(1, numberOfSwaths):
|
||||
for j in range(1, i+1):
|
||||
diffMean2[i] += diffMean[j]
|
||||
|
||||
|
||||
#mosaic swaths
|
||||
diffflag = 1
|
||||
oflag = [0 for i in range(numberOfSwaths)]
|
||||
mosaicsubswath(outputfile, outWidth, outLength, delta, diffflag, numberOfSwaths,
|
||||
rinfs, rectWidth, rangeOffsets3, azimuthOffsets3, diffMean2, oflag)
|
||||
#remove tmp files
|
||||
for x in rinfs:
|
||||
os.remove(x)
|
||||
|
||||
|
||||
#update frame parameters
|
||||
if updateFrame:
|
||||
#mosaic size
|
||||
frame.numberOfSamples = outWidth
|
||||
frame.numberOfLines = outLength
|
||||
#NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
|
||||
#range parameters
|
||||
frame.startingRange = frame.swaths[0].startingRange
|
||||
frame.rangeSamplingRate = frame.swaths[0].rangeSamplingRate
|
||||
frame.rangePixelSize = frame.swaths[0].rangePixelSize
|
||||
#azimuth parameters
|
||||
azimuthTimeOffset = - max([int(x) for x in azimuthOffsets2]) * numberOfAzimuthLooks * frame.swaths[0].azimuthLineInterval
|
||||
frame.sensingStart = frame.swaths[0].sensingStart + datetime.timedelta(seconds = azimuthTimeOffset)
|
||||
frame.prf = frame.swaths[0].prf
|
||||
frame.azimuthPixelSize = frame.swaths[0].azimuthPixelSize
|
||||
frame.azimuthLineInterval = frame.swaths[0].azimuthLineInterval
|
||||
|
||||
|
||||
if phaseCompensation:
|
||||
# estimated phase diff, used phase diff, used phase diff source
|
||||
return (phaseDiffEst, diffMean, phaseDiffSource, numberOfValidSamples)
|
||||
|
||||
def swathMosaicParameters(frame, rangeOffsets, azimuthOffsets, numberOfRangeLooks, numberOfAzimuthLooks):
    '''
    mosaic swaths (this is a simplified version of swathMosaic that only
    updates the frame's mosaic parameters; no image data is touched)

    frame:                frame object whose mosaic parameters are updated in place
    rangeOffsets:         per-swath range offsets (single-look pixels)
    azimuthOffsets:       per-swath azimuth offsets (single-look lines)
    numberOfRangeLooks:   number of range looks of the input files
    numberOfAzimuthLooks: number of azimuth looks of the input files
    '''

    swaths = frame.swaths
    numberOfSwaths = len(swaths)
    reference = swaths[0]

    # pixel-size scale of the first swath relative to each swath, and the
    # width/length each swath would have after resampling to the first
    # swath's grid (looks already taken)
    rangeScale = [reference.rangePixelSize / s.rangePixelSize for s in swaths]
    azimuthScale = [reference.azimuthLineInterval / s.azimuthLineInterval for s in swaths]

    rectWidth = []
    rectLength = []
    for k, s in enumerate(swaths):
        lookedWidth = int(s.numberOfSamples / numberOfRangeLooks)
        lookedLength = int(s.numberOfLines / numberOfAzimuthLooks)
        if k == 0:
            # first swath defines the grid: no resampling
            rectWidth.append(lookedWidth)
            rectLength.append(lookedLength)
        else:
            rectWidth.append(int(1.0 / rangeScale[k] * lookedWidth))
            rectLength.append(int(1.0 / azimuthScale[k] * lookedLength))

    # convert original (single-look) offsets to offsets on the looked grid
    # use lists instead of np.array to stay consistent with the rest of the code
    rangeOffsets1 = [v / numberOfRangeLooks for v in rangeOffsets]
    azimuthOffsets1 = [v / numberOfAzimuthLooks for v in azimuthOffsets]

    # accumulate pairwise offsets to get each swath's offset relative to
    # the first swath
    rangeOffsets2 = [0.0]
    azimuthOffsets2 = [0.0]
    for k in range(1, numberOfSwaths):
        rangeOffsets2.append(rangeOffsets2[k - 1] + rangeOffsets1[k])
        azimuthOffsets2.append(azimuthOffsets2[k - 1] + azimuthOffsets1[k])

    # determine output extent (start/end sample/line of each swath in the
    # mosaic coordinate frame; range extent kept for symmetry)
    xs = []
    xe = []
    ys = []
    ye = []
    for k in range(numberOfSwaths):
        x0 = 0 if k == 0 else 0 - int(rangeOffsets2[k])
        y0 = 0 if k == 0 else 0 - int(azimuthOffsets2[k])
        xs.append(x0)
        xe.append(x0 + rectWidth[k] - 1)
        ys.append(y0)
        ye.append(y0 + rectLength[k] - 1)

    outWidth = max(xe) - min(xs) + 1
    outLength = max(ye) - min(ys) + 1

    # update frame parameters
    # mosaic size
    frame.numberOfSamples = outWidth
    frame.numberOfLines = outLength
    # NOTE THAT WE ARE STILL USING SINGLE LOOK PARAMETERS HERE
    # range parameters follow the first swath
    frame.startingRange = reference.startingRange
    frame.rangeSamplingRate = reference.rangeSamplingRate
    frame.rangePixelSize = reference.rangePixelSize
    # azimuth parameters: shift sensing start back to the earliest swath
    azimuthTimeOffset = - max([int(x) for x in azimuthOffsets2]) * numberOfAzimuthLooks * reference.azimuthLineInterval
    frame.sensingStart = reference.sensingStart + datetime.timedelta(seconds=azimuthTimeOffset)
    frame.prf = reference.prf
    frame.azimuthPixelSize = reference.azimuthPixelSize
    frame.azimuthLineInterval = reference.azimuthLineInterval
||||
|
||||
def readImage(inputfile, numberOfSamples, numberOfLines, startSample, endSample, startLine, endLine):
    '''
    read a chunk of a complex64 image

    inputfile:       flat binary file of complex64 samples
    numberOfSamples: width of the full image
    numberOfLines:   length of the full image
    the chunk indexes (startSample, endSample, startLine, endLine) are
    inclusive and start with zero

    memmap is not used, because it is much slower
    '''
    chunkWidth = endSample - startSample + 1
    chunkLength = endLine - startLine + 1
    data = np.zeros((chunkLength, chunkWidth), dtype=np.complex64)
    with open(inputfile, 'rb') as fp:
        for row, line in enumerate(range(startLine, endLine + 1)):
            # jump to the first requested sample of this line; 8 bytes per complex64
            fp.seek((line * numberOfSamples + startSample) * 8, 0)
            data[row] = np.fromfile(fp, dtype=np.complex64, count=chunkWidth)
    return data
|
||||
|
||||
|
||||
def findNonzero_v1(data):
    '''
    find the first/last non-zero line/sample
    all indexes start from zero

    NOTE(review): np.nonzero returns indexes in row-major order, so the
    sample values below are the columns of the first/last nonzero element,
    not the global min/max nonzero column -- behavior kept as-is; confirm
    callers expect this (findNonzero is the newer replacement)
    '''
    lineIdx, sampleIdx = np.nonzero(data)

    # first line, last line, first sample, last sample
    return (lineIdx[0], lineIdx[-1], sampleIdx[0], sampleIdx[-1])
|
||||
|
||||
|
||||
def findNonzero(data, lineRatio=0.5, sampleRatio=0.5):
    '''
    find the first/last valid (mostly non-zero) line/sample
    all indexes start from zero

    data:        2-d numpy array
    lineRatio:   a line is valid when more than width*lineRatio of its samples are nonzero
    sampleRatio: a sample (column) is valid when more than length*sampleRatio of its lines are nonzero

    returns (first line, last line, first sample, last sample)
    raises IndexError when no line/sample passes the threshold (unchanged behavior)
    '''
    # fix: removed the redundant function-level `import numpy as np`
    # (np is already available at module level, as used by readImage etc.)
    (length, width) = data.shape

    # compute the validity mask once instead of twice
    valid = (data != 0)
    lineIndex = np.nonzero(np.sum(valid, axis=1) > width * lineRatio)[0]
    sampleIndex = np.nonzero(np.sum(valid, axis=0) > length * sampleRatio)[0]

    # first line, last line, first sample, last sample
    return (lineIndex[0], lineIndex[-1], sampleIndex[0], sampleIndex[-1])
|
||||
|
||||
|
||||
|
|
@ -0,0 +1,393 @@
|
|||
#
|
||||
# Author: Cunren Liang
|
||||
# Copyright 2015-present, NASA-JPL/Caltech
|
||||
#
|
||||
|
||||
import os
|
||||
import glob
|
||||
import logging
|
||||
import datetime
|
||||
import numpy as np
|
||||
|
||||
import isceobj
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import multilook
|
||||
|
||||
|
||||
logger = logging.getLogger('isce.alos2insar.runSwathOffset')
|
||||
|
||||
def runSwathOffset(self):
    '''estimate swath offsets.

    Pipeline step: for every reference frame, estimates the range/azimuth
    offsets between adjacent swaths of the reference and secondary tracks
    and stores the results on self._insar for the later swath mosaic step.
    '''
    # skip entirely when the InSAR part of the chain is disabled
    if hasattr(self, 'doInSAR'):
        if not self.doInSAR:
            return

    catalog = isceobj.Catalog.createCatalog(self._insar.procDoc.name)
    self.updateParamemetersFromUser()

    referenceTrack = self._insar.loadTrack(reference=True)
    secondaryTrack = self._insar.loadTrack(reference=False)

    # process frames one by one inside their own working directories
    for i, frameNumber in enumerate(self._insar.referenceFrames):
        frameDir = 'f{}_{}'.format(i+1, frameNumber)
        os.chdir(frameDir)

        mosaicDir = 'mosaic'
        os.makedirs(mosaicDir, exist_ok=True)
        os.chdir(mosaicDir)

        # swath offsets are only needed when the acquisition mode supports
        # swath mosaicking AND there is more than one swath to mosaic
        # NOTE(review): mode combinations 21/22/31/32 presumably identify the
        # multi-swath (ScanSAR) pair types -- confirm against the insar
        # object's modeCombination definition
        if not (
            ((self._insar.modeCombination == 21) or \
            (self._insar.modeCombination == 22) or \
            (self._insar.modeCombination == 31) or \
            (self._insar.modeCombination == 32))
            and
            (self._insar.endingSwath-self._insar.startingSwath+1 > 1)
            ):
            # back out of mosaicDir/frameDir before the next frame
            os.chdir('../../')
            continue

        #compute swath offset (cross-correlation matching optional for reference)
        offsetReference = swathOffset(referenceTrack.frames[i], self._insar.referenceSlc, self._insar.referenceSwathOffset,
                                      crossCorrelation=self.swathOffsetMatching, numberOfAzimuthLooks=10)
        #only use geometrical offset for secondary
        offsetSecondary = swathOffset(secondaryTrack.frames[i], self._insar.secondarySlc, self._insar.secondarySwathOffset,
                                      crossCorrelation=False, numberOfAzimuthLooks=10)

        #initialization of the per-frame result lists (first frame only)
        if i == 0:
            self._insar.swathRangeOffsetGeometricalReference = []
            self._insar.swathAzimuthOffsetGeometricalReference = []
            self._insar.swathRangeOffsetGeometricalSecondary = []
            self._insar.swathAzimuthOffsetGeometricalSecondary = []
            if self.swathOffsetMatching:
                self._insar.swathRangeOffsetMatchingReference = []
                self._insar.swathAzimuthOffsetMatchingReference = []
                #self._insar.swathRangeOffsetMatchingSecondary = []
                #self._insar.swathAzimuthOffsetMatchingSecondary = []

        #append list directly, as the API support 2-d list
        self._insar.swathRangeOffsetGeometricalReference.append(offsetReference[0])
        self._insar.swathAzimuthOffsetGeometricalReference.append(offsetReference[1])
        self._insar.swathRangeOffsetGeometricalSecondary.append(offsetSecondary[0])
        self._insar.swathAzimuthOffsetGeometricalSecondary.append(offsetSecondary[1])
        if self.swathOffsetMatching:
            # indexes 2/3 only exist when swathOffset ran with crossCorrelation=True
            self._insar.swathRangeOffsetMatchingReference.append(offsetReference[2])
            self._insar.swathAzimuthOffsetMatchingReference.append(offsetReference[3])
            #self._insar.swathRangeOffsetMatchingSecondary.append(offsetSecondary[2])
            #self._insar.swathAzimuthOffsetMatchingSecondary.append(offsetSecondary[3])

        os.chdir('../../')

    catalog.printToLog(logger, "runSwathOffset")
    self._insar.procDoc.addAllFromCatalog(catalog)
|
||||
|
||||
|
||||
def swathOffset(frame, image, outputfile, crossCorrelation=True, numberOfAzimuthLooks=10):
    '''
    compute swath offset
    frame: frame object
    image: image for doing matching
    outputfile: output txt file for saving swath offset
    crossCorrelation: whether do matching
    numberOfAzimuthLooks: number of looks to take in azimuth before matching

    returns (rangeOffsetGeometrical, azimuthOffsetGeometrical,
             rangeOffsetMatching, azimuthOffsetMatching) when crossCorrelation
    is True, otherwise only the two geometrical lists; offsets are between
    swath j-1 and swath j, with 0.0 for the first swath.
    NOTE(review): outputfile is only written when crossCorrelation is True.
    '''

    rangeOffsetGeometrical = []
    azimuthOffsetGeometrical = []
    rangeOffsetMatching = []
    azimuthOffsetMatching = []

    for j in range(len(frame.swaths)):
        frameNumber = frame.frameNumber
        swathNumber = frame.swaths[j].swathNumber
        swathDir = 's{}'.format(swathNumber)

        print('estimate offset frame {}, swath {}'.format(frameNumber, swathNumber))

        # the first swath is the reference: zero offset by definition
        if j == 0:
            rangeOffsetGeometrical.append(0.0)
            azimuthOffsetGeometrical.append(0.0)
            rangeOffsetMatching.append(0.0)
            azimuthOffsetMatching.append(0.0)
            swathDirLast = swathDir
            continue

        # images of the previous and current swath (working dir is the
        # frame's mosaic directory, hence the '../' prefix)
        image1 = os.path.join('../', swathDirLast, image)
        image2 = os.path.join('../', swathDir, image)
        swath0 = frame.swaths[0]
        swath1 = frame.swaths[j-1]
        swath2 = frame.swaths[j]

        # scales of swath 0's pixel grid relative to swaths j-1 and j
        rangeScale1 = swath0.rangePixelSize / swath1.rangePixelSize
        azimuthScale1 = swath0.azimuthLineInterval / swath1.azimuthLineInterval
        rangeScale2 = swath0.rangePixelSize / swath2.rangePixelSize
        azimuthScale2 = swath0.azimuthLineInterval / swath2.azimuthLineInterval

        #offset from geometry
        offsetGeometrical = computeSwathOffset(swath1, swath2, rangeScale1, azimuthScale1)
        rangeOffsetGeometrical.append(offsetGeometrical[0])
        azimuthOffsetGeometrical.append(offsetGeometrical[1])

        #offset from cross-correlation
        if crossCorrelation:
            offsetMatching = estimateSwathOffset(swath1, swath2, image1, image2, rangeScale1,
                                azimuthScale1, rangeScale2, azimuthScale2, numberOfAzimuthLooks)
            if offsetMatching != None:
                rangeOffsetMatching.append(offsetMatching[0])
                azimuthOffsetMatching.append(offsetMatching[1])
            else:
                # matching failed: fall back to the geometrical offset
                print('******************************************************************')
                print('WARNING: bad matching offset, we are forced to use')
                print('         geometrical offset for swath mosaicking')
                print('******************************************************************')
                rangeOffsetMatching.append(offsetGeometrical[0])
                azimuthOffsetMatching.append(offsetGeometrical[1])

        swathDirLast = swathDir


    if crossCorrelation:
        # build a human-readable comparison table of the two offset estimates
        offsetComp = "\n\ncomparision of offsets:\n\n"
        offsetComp += "offset type       i           geometrical           match      difference\n"
        offsetComp += "+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++\n"
        for i, (offset1, offset2) in enumerate(zip(rangeOffsetGeometrical, rangeOffsetMatching)):
            offsetComp += "range offset     {:2d}   {:13.3f}   {:13.3f}   {:13.3f}\n".format(i, offset1, offset2, offset1 - offset2)
        for i, (offset1, offset2) in enumerate(zip(azimuthOffsetGeometrical, azimuthOffsetMatching)):
            offsetComp += "azimuth offset   {:2d}   {:13.3f}   {:13.3f}   {:13.3f}\n".format(i, offset1, offset2, offset1 - offset2)

        #write and report offsets
        with open(outputfile, 'w') as f:
            f.write(offsetComp)
        print("{}".format(offsetComp))


    if crossCorrelation:
        return (rangeOffsetGeometrical, azimuthOffsetGeometrical, rangeOffsetMatching, azimuthOffsetMatching)
    else:
        return (rangeOffsetGeometrical, azimuthOffsetGeometrical)
|
||||
|
||||
|
||||
def computeSwathOffset(swath1, swath2, rangeScale1=1, azimuthScale1=1):
    '''
    compute the geometrical offset of swath2 relative to swath1

    swath1, swath2: swath objects (swath1 is the reference)
    rangeScale1:    range pixel scale applied to the result
    azimuthScale1:  azimuth line scale applied to the result

    returns (rangeOffset, azimuthOffset) in swath1 pixels/lines divided by
    the given scales; negative sign maps swath2's start into swath1's grid
    '''
    deltaRange = swath2.startingRange - swath1.startingRange
    deltaTime = (swath2.sensingStart - swath1.sensingStart).total_seconds()

    rangeOffset = -deltaRange / swath1.rangePixelSize / rangeScale1
    azimuthOffset = -deltaTime / swath1.azimuthLineInterval / azimuthScale1

    return (rangeOffset, azimuthOffset)
|
||||
|
||||
|
||||
def estimateSwathOffset(swath1, swath2, image1, image2, rangeScale1=1, azimuthScale1=1, rangeScale2=1, azimuthScale2=1, numberOfAzimuthLooks=10):
|
||||
'''
|
||||
estimate offset of two adjacent swaths using matching
|
||||
'''
|
||||
from osgeo import gdal
|
||||
import isceobj
|
||||
from contrib.alos2proc_f.alos2proc_f import rect_with_looks
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import create_xml
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import cullOffsets
|
||||
from isceobj.Alos2Proc.Alos2ProcPublic import meanOffset
|
||||
from mroipac.ampcor.Ampcor import Ampcor
|
||||
|
||||
|
||||
#processing image 1
|
||||
rangeOff1 = int((swath2.startingRange - swath1.startingRange) / swath1.rangePixelSize)
|
||||
if rangeOff1 < 0:
|
||||
rangeOff1 = 0
|
||||
numberOfSamples1 = swath1.numberOfSamples - rangeOff1
|
||||
|
||||
numberOfSamplesRect1 = int(numberOfSamples1/rangeScale1)
|
||||
numberOfLinesRect1 = int(swath1.numberOfLines/azimuthScale1)
|
||||
|
||||
numberOfSamplesLook1 = int(numberOfSamplesRect1/1)
|
||||
numberOfLinesLook1 = int(numberOfLinesRect1/numberOfAzimuthLooks)
|
||||
|
||||
#get magnitude image whether complex or not
|
||||
#ReadAsArray: https://pcjericks.github.io/py-gdalogr-cookbook/raster_layers.html
|
||||
ds = gdal.Open(image1 + '.vrt', gdal.GA_ReadOnly)
|
||||
data = ds.ReadAsArray(rangeOff1, 0, numberOfSamples1, swath1.numberOfLines)
|
||||
ds = None
|
||||
(np.absolute(data)).astype(np.float32).tofile('image1.float')
|
||||
|
||||
#rectify
|
||||
if rangeScale1 == 1 and azimuthScale1 == 1:
|
||||
os.rename('image1.float', 'image1_rect.float')
|
||||
else:
|
||||
rect_with_looks('image1.float',
|
||||
'image1_rect.float',
|
||||
numberOfSamples1, swath1.numberOfLines,
|
||||
numberOfSamplesRect1, numberOfLinesRect1,
|
||||
rangeScale1, 0.0,
|
||||
0.0,azimuthScale1,
|
||||
0.0,0.0,
|
||||
1,1,
|
||||
1,1,
|
||||
'REAL',
|
||||
'Bilinear')
|
||||
os.remove('image1.float')
|
||||
|
||||
#take looks
|
||||
if numberOfAzimuthLooks == 1:
|
||||
os.rename('image1_rect.float', 'image1_look.float')
|
||||
else:
|
||||
data1 = np.fromfile('image1_rect.float', dtype=np.float32).reshape(numberOfLinesRect1, numberOfSamplesRect1)
|
||||
data1 = np.sqrt(multilook(data1**2, numberOfAzimuthLooks, 1))
|
||||
data1.astype(np.float32).tofile('image1_look.float')
|
||||
os.remove('image1_rect.float')
|
||||
create_xml('image1_look.float', numberOfSamplesLook1, numberOfLinesLook1, 'float')
|
||||
|
||||
|
||||
#processing image 2
|
||||
rangeOff2 = 0
|
||||
numberOfSamples2 = int((swath1.startingRange + swath1.rangePixelSize * (swath1.numberOfSamples - 1) - swath2.startingRange) / swath2.rangePixelSize) + 1
|
||||
if numberOfSamples2 > swath2.numberOfSamples:
|
||||
numberOfSamples2 = swath2.numberOfSamples
|
||||
|
||||
numberOfSamplesRect2 = int(numberOfSamples2/rangeScale2)
|
||||
numberOfLinesRect2 = int(swath2.numberOfLines/azimuthScale2)
|
||||
|
||||
numberOfSamplesLook2 = int(numberOfSamplesRect2/1)
|
||||
numberOfLinesLook2 = int(numberOfLinesRect2/numberOfAzimuthLooks)
|
||||
|
||||
#get magnitude image whether complex or not
|
||||
ds = gdal.Open(image2 + '.vrt', gdal.GA_ReadOnly)
|
||||
data = ds.ReadAsArray(rangeOff2, 0, numberOfSamples2, swath2.numberOfLines)
|
||||
ds = None
|
||||
(np.absolute(data)).astype(np.float32).tofile('image2.float')
|
||||
|
||||
#rectify
|
||||
if rangeScale2 == 1 and azimuthScale2 == 1:
|
||||
os.rename('image2.float', 'image2_rect.float')
|
||||
else:
|
||||
rect_with_looks('image2.float',
|
||||
'image2_rect.float',
|
||||
numberOfSamples2, swath2.numberOfLines,
|
||||
numberOfSamplesRect2, numberOfLinesRect2,
|
||||
rangeScale2, 0.0,
|
||||
0.0,azimuthScale2,
|
||||
0.0,0.0,
|
||||
1,1,
|
||||
1,1,
|
||||
'REAL',
|
||||
'Bilinear')
|
||||
os.remove('image2.float')
|
||||
|
||||
#take looks
|
||||
if numberOfAzimuthLooks == 1:
|
||||
os.rename('image2_rect.float', 'image2_look.float')
|
||||
else:
|
||||
data2 = np.fromfile('image2_rect.float', dtype=np.float32).reshape(numberOfLinesRect2, numberOfSamplesRect2)
|
||||
data2 = np.sqrt(multilook(data2**2, numberOfAzimuthLooks, 1))
|
||||
data2.astype(np.float32).tofile('image2_look.float')
|
||||
os.remove('image2_rect.float')
|
||||
create_xml('image2_look.float', numberOfSamplesLook2, numberOfLinesLook2, 'float')
|
||||
|
||||
|
||||
#matching
|
||||
ampcor = Ampcor(name='insarapp_slcs_ampcor')
|
||||
ampcor.configure()
|
||||
|
||||
mMag = isceobj.createImage()
|
||||
mMag.load('image1_look.float.xml')
|
||||
mMag.setAccessMode('read')
|
||||
mMag.createImage()
|
||||
|
||||
sMag = isceobj.createImage()
|
||||
sMag.load('image2_look.float.xml')
|
||||
sMag.setAccessMode('read')
|
||||
sMag.createImage()
|
||||
|
||||
ampcor.setImageDataType1('real')
|
||||
ampcor.setImageDataType2('real')
|
||||
|
||||
ampcor.setReferenceSlcImage(mMag)
|
||||
ampcor.setSecondarySlcImage(sMag)
|
||||
|
||||
#MATCH REGION
|
||||
rgoff = 0
|
||||
azoff = int((swath1.sensingStart - swath2.sensingStart).total_seconds() / swath1.azimuthLineInterval / azimuthScale1 / numberOfAzimuthLooks)
|
||||
#it seems that we cannot use 0, haven't look into the problem
|
||||
if rgoff == 0:
|
||||
rgoff = 1
|
||||
if azoff == 0:
|
||||
azoff = 1
|
||||
firstSample = 1
|
||||
if rgoff < 0:
|
||||
firstSample = int(35 - rgoff)
|
||||
firstLine = 1
|
||||
if azoff < 0:
|
||||
firstLine = int(35 - azoff)
|
||||
ampcor.setAcrossGrossOffset(rgoff)
|
||||
ampcor.setDownGrossOffset(azoff)
|
||||
ampcor.setFirstSampleAcross(firstSample)
|
||||
ampcor.setLastSampleAcross(numberOfSamplesLook1)
|
||||
ampcor.setNumberLocationAcross(20)
|
||||
ampcor.setFirstSampleDown(firstLine)
|
||||
ampcor.setLastSampleDown(numberOfLinesLook1)
|
||||
ampcor.setNumberLocationDown(100)
|
||||
|
||||
#MATCH PARAMETERS
|
||||
ampcor.setWindowSizeWidth(32)
|
||||
ampcor.setWindowSizeHeight(32)
|
||||
#note this is the half width/length of search area, so number of resulting correlation samples: 8*2+1
|
||||
ampcor.setSearchWindowSizeWidth(8)
|
||||
ampcor.setSearchWindowSizeHeight(8)
|
||||
|
||||
#REST OF THE STUFF
|
||||
ampcor.setAcrossLooks(1)
|
||||
ampcor.setDownLooks(1)
|
||||
ampcor.setOversamplingFactor(64)
|
||||
ampcor.setZoomWindowSize(16)
|
||||
#1. The following not set
|
||||
#Matching Scale for Sample/Line Directions (-) = 1. 1.
|
||||
#should add the following in Ampcor.py?
|
||||
#if not set, in this case, Ampcor.py'value is also 1. 1.
|
||||
#ampcor.setScaleFactorX(1.)
|
||||
#ampcor.setScaleFactorY(1.)
|
||||
|
||||
#MATCH THRESHOLDS AND DEBUG DATA
|
||||
#2. The following not set
|
||||
#in roi_pac the value is set to 0 1
|
||||
#in isce the value is set to 0.001 1000.0
|
||||
#SNR and Covariance Thresholds (-) = {s1} {s2}
|
||||
#should add the following in Ampcor?
|
||||
#THIS SHOULD BE THE ONLY THING THAT IS DIFFERENT FROM THAT OF ROI_PAC
|
||||
#ampcor.setThresholdSNR(0)
|
||||
#ampcor.setThresholdCov(1)
|
||||
ampcor.setDebugFlag(False)
|
||||
ampcor.setDisplayFlag(False)
|
||||
|
||||
#in summary, only two things not set which are indicated by 'The following not set' above.
|
||||
|
||||
#run ampcor
|
||||
ampcor.ampcor()
|
||||
offsets = ampcor.getOffsetField()
|
||||
refinedOffsets = cullOffsets(offsets)
|
||||
|
||||
#finalize image, and re-create it
|
||||
#otherwise the file pointer is still at the end of the image
|
||||
mMag.finalizeImage()
|
||||
sMag.finalizeImage()
|
||||
|
||||
os.remove('image1_look.float')
|
||||
os.remove('image1_look.float.vrt')
|
||||
os.remove('image1_look.float.xml')
|
||||
os.remove('image2_look.float')
|
||||
os.remove('image2_look.float.vrt')
|
||||
os.remove('image2_look.float.xml')
|
||||
|
||||
if refinedOffsets != None:
|
||||
rangeOffset, azimuthOffset = meanOffset(refinedOffsets)
|
||||
rangeOffset -= rangeOff1/rangeScale1
|
||||
azimuthOffset *= numberOfAzimuthLooks
|
||||
return (rangeOffset, azimuthOffset)
|
||||
else:
|
||||
return None
|
||||
|
||||
|
||||
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue